1 /* Description of builtins used by the ARM backend.
2    Copyright (C) 2014-2020 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify it
7    under the terms of the GNU General Public License as published
8    by the Free Software Foundation; either version 3, or (at your
9    option) any later version.
10 
11    GCC is distributed in the hope that it will be useful, but WITHOUT
12    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
14    License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #define IN_TARGET_CODE 1
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "function.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple-expr.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "profile-count.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "langhooks.h"
42 #include "case-cfn-macros.h"
43 #include "sbitmap.h"
44 #include "stringpool.h"
45 #include "arm-builtins.h"
46 #include "stringpool.h"
47 #include "attribs.h"
48 
49 #define SIMD_MAX_BUILTIN_ARGS 7
50 
51 enum arm_type_qualifiers
52 {
53   /* T foo.  */
54   qualifier_none = 0x0,
55   /* unsigned T foo.  */
56   qualifier_unsigned = 0x1, /* 1 << 0  */
57   /* const T foo.  */
58   qualifier_const = 0x2, /* 1 << 1  */
59   /* T *foo.  */
60   qualifier_pointer = 0x4, /* 1 << 2  */
61   /* const T * foo.  */
62   qualifier_const_pointer = 0x6,
63   /* Used when expanding arguments if an operand could
64      be an immediate.  */
65   qualifier_immediate = 0x8, /* 1 << 3  */
66   qualifier_unsigned_immediate = 0x9,
67   qualifier_maybe_immediate = 0x10, /* 1 << 4  */
68   /* void foo (...).  */
69   qualifier_void = 0x20, /* 1 << 5  */
70   /* Some patterns may have internal operands, this qualifier is an
71      instruction to the initialisation code to skip this operand.  */
72   qualifier_internal = 0x40, /* 1 << 6  */
73   /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
74      rather than using the type of the operand.  */
75   qualifier_map_mode = 0x80, /* 1 << 7  */
76   /* qualifier_pointer | qualifier_map_mode  */
77   qualifier_pointer_map_mode = 0x84,
78   /* qualifier_const_pointer | qualifier_map_mode  */
79   qualifier_const_pointer_map_mode = 0x86,
80   /* Polynomial types.  */
81   qualifier_poly = 0x100,
82   /* Lane indices - must be within range of previous argument = a vector.  */
83   qualifier_lane_index = 0x200,
84   /* Lane indices for single lane structure loads and stores.  */
85   qualifier_struct_load_store_lane_index = 0x400,
86   /* A void pointer.  */
87   qualifier_void_pointer = 0x800,
88   /* A const void pointer.  */
89   qualifier_const_void_pointer = 0x802,
90   /* Lane indices selected in pairs - must be within range of previous
91      argument = a vector.  */
92   qualifier_lane_pair_index = 0x1000,
93   /* Lane indices selected in quadtuplets - must be within range of previous
94      argument = a vector.  */
95   qualifier_lane_quadtup_index = 0x2000
96 };
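
/* Several of the values above are bitwise combinations of the single-bit
   flags rather than independent bits.  A minimal illustration, derived
   directly from the values above (these are not additional enumerators):

     qualifier_const_pointer          == qualifier_const | qualifier_pointer
                                         (0x2 | 0x4 == 0x6)
     qualifier_unsigned_immediate     == qualifier_unsigned | qualifier_immediate
                                         (0x1 | 0x8 == 0x9)
     qualifier_const_void_pointer     == qualifier_const | qualifier_void_pointer
                                         (0x2 | 0x800 == 0x802)
     qualifier_const_pointer_map_mode == qualifier_const_pointer | qualifier_map_mode
                                         (0x6 | 0x80 == 0x86)  */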
97 
98 /*  The qualifier_internal allows generation of a unary builtin from
99     a pattern with a third pseudo-operand such as a match_scratch.
100     T (T).  */
101 static enum arm_type_qualifiers
102 arm_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
103   = { qualifier_none, qualifier_none, qualifier_internal };
104 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
105 
106 /* unsigned T (unsigned T).  */
107 static enum arm_type_qualifiers
108 arm_bswap_qualifiers[SIMD_MAX_BUILTIN_ARGS]
109   = { qualifier_unsigned, qualifier_unsigned };
110 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
111 
112 /* T (T, T [maybe_immediate]).  */
113 static enum arm_type_qualifiers
114 arm_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
115   = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
116 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
117 
118 /* T (T, T, T).  */
119 static enum arm_type_qualifiers
120 arm_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
121   = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
122 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
123 
124 /* unsigned T (unsigned T, unsigned T, unsigned T).  */
125 static enum arm_type_qualifiers
126 arm_unsigned_uternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
127   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
128       qualifier_unsigned };
129 #define UTERNOP_QUALIFIERS (arm_unsigned_uternop_qualifiers)
130 
131 /* T (T, unsigned T, T).  */
132 static enum arm_type_qualifiers
133 arm_usternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
134   = { qualifier_none, qualifier_none, qualifier_unsigned,
135       qualifier_none };
136 #define USTERNOP_QUALIFIERS (arm_usternop_qualifiers)
137 
138 /* T (T, immediate).  */
139 static enum arm_type_qualifiers
140 arm_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
141   = { qualifier_none, qualifier_none, qualifier_immediate };
142 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
143 
144 /* T (T, unsigned immediate).  */
145 static enum arm_type_qualifiers
146 arm_sat_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
147   = { qualifier_unsigned, qualifier_none, qualifier_unsigned_immediate };
148 #define SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
149   (arm_sat_binop_imm_qualifiers)
150 
151 /* unsigned T (T, unsigned immediate).  */
152 static enum arm_type_qualifiers
153 arm_unsigned_sat_binop_unsigned_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
154   = { qualifier_unsigned, qualifier_none, qualifier_unsigned_immediate };
155 #define UNSIGNED_SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
156   (arm_unsigned_sat_binop_unsigned_imm_qualifiers)
157 
158 /* T (T, lane index).  */
159 static enum arm_type_qualifiers
160 arm_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
161   = { qualifier_none, qualifier_none, qualifier_lane_index };
162 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
163 
164 /* T (T, T, T, immediate).  */
165 static enum arm_type_qualifiers
166 arm_mac_n_qualifiers[SIMD_MAX_BUILTIN_ARGS]
167   = { qualifier_none, qualifier_none, qualifier_none,
168       qualifier_none, qualifier_immediate };
169 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
170 
171 /* T (T, T, T, lane index).  */
172 static enum arm_type_qualifiers
173 arm_mac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
174   = { qualifier_none, qualifier_none, qualifier_none,
175       qualifier_none, qualifier_lane_index };
176 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
177 
178 /* T (T, T, T, lane pair index).  */
179 static enum arm_type_qualifiers
180 arm_mac_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
181   = { qualifier_none, qualifier_none, qualifier_none,
182       qualifier_none, qualifier_lane_pair_index };
183 #define MAC_LANE_PAIR_QUALIFIERS (arm_mac_lane_pair_qualifiers)
184 
185 /* unsigned T (unsigned T, unsigned T, unsigned T, lane index).  */
186 static enum arm_type_qualifiers
187 arm_umac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
188   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
189       qualifier_unsigned, qualifier_lane_index };
190 #define UMAC_LANE_QUALIFIERS (arm_umac_lane_qualifiers)
191 
192 /* T (T, unsigned T, T, lane quadtuplet index).  */
193 static enum arm_type_qualifiers
194 arm_usmac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
195   = { qualifier_none, qualifier_none, qualifier_unsigned,
196       qualifier_none, qualifier_lane_quadtup_index };
197 #define USMAC_LANE_QUADTUP_QUALIFIERS (arm_usmac_lane_quadtup_qualifiers)
198 
199 /* T (T, T, unsigned T, lane quadtuplet index).  */
200 static enum arm_type_qualifiers
201 arm_sumac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
202   = { qualifier_none, qualifier_none, qualifier_none,
203       qualifier_unsigned, qualifier_lane_quadtup_index };
204 #define SUMAC_LANE_QUADTUP_QUALIFIERS (arm_sumac_lane_quadtup_qualifiers)
205 
206 /* T (T, T, immediate).  */
207 static enum arm_type_qualifiers
208 arm_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
209   = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
210 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
211 
212 /* T (T, T, lane index).  */
213 static enum arm_type_qualifiers
214 arm_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
215   = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
216 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
217 
218 /* T (T, T).  */
219 static enum arm_type_qualifiers
220 arm_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
221   = { qualifier_none, qualifier_none, qualifier_none };
222 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
223 
224 /* T ([T element type] *).  */
225 static enum arm_type_qualifiers
226 arm_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
227   = { qualifier_none, qualifier_const_pointer_map_mode };
228 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
229 
230 /* T ([T element type] *, T, immediate).  */
231 static enum arm_type_qualifiers
232 arm_load1_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
233   = { qualifier_none, qualifier_const_pointer_map_mode,
234       qualifier_none, qualifier_struct_load_store_lane_index };
235 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
236 
237 /* unsigned T (unsigned T, unsigned T, unsigned T).  */
238 static enum arm_type_qualifiers
239 arm_unsigned_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
240   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
241       qualifier_unsigned };
242 #define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
243 
244 /* void (unsigned immediate, unsigned immediate, unsigned immediate,
245 	 unsigned immediate, unsigned immediate, unsigned immediate).  */
246 static enum arm_type_qualifiers
247 arm_cdp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
248   = { qualifier_void, qualifier_unsigned_immediate,
249       qualifier_unsigned_immediate,
250       qualifier_unsigned_immediate,
251       qualifier_unsigned_immediate,
252       qualifier_unsigned_immediate,
253       qualifier_unsigned_immediate };
254 #define CDP_QUALIFIERS \
255   (arm_cdp_qualifiers)
256 
257 /* void (unsigned immediate, unsigned immediate,  const void *).  */
258 static enum arm_type_qualifiers
259 arm_ldc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
260   = { qualifier_void, qualifier_unsigned_immediate,
261       qualifier_unsigned_immediate, qualifier_const_void_pointer };
262 #define LDC_QUALIFIERS \
263   (arm_ldc_qualifiers)
264 
265 /* void (unsigned immediate, unsigned immediate,  void *).  */
266 static enum arm_type_qualifiers
267 arm_stc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
268   = { qualifier_void, qualifier_unsigned_immediate,
269       qualifier_unsigned_immediate, qualifier_void_pointer };
270 #define STC_QUALIFIERS \
271   (arm_stc_qualifiers)
272 
273 /* void (unsigned immediate, unsigned immediate,  T, unsigned immediate,
274 	 unsigned immediate, unsigned immediate).  */
275 static enum arm_type_qualifiers
276 arm_mcr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
277   = { qualifier_void, qualifier_unsigned_immediate,
278       qualifier_unsigned_immediate, qualifier_none,
279       qualifier_unsigned_immediate, qualifier_unsigned_immediate,
280       qualifier_unsigned_immediate };
281 #define MCR_QUALIFIERS \
282   (arm_mcr_qualifiers)
283 
284 /* T (unsigned immediate, unsigned immediate, unsigned immediate,
285       unsigned immediate, unsigned immediate).  */
286 static enum arm_type_qualifiers
287 arm_mrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
288   = { qualifier_none, qualifier_unsigned_immediate,
289       qualifier_unsigned_immediate, qualifier_unsigned_immediate,
290       qualifier_unsigned_immediate, qualifier_unsigned_immediate };
291 #define MRC_QUALIFIERS \
292   (arm_mrc_qualifiers)
293 
294 /* void (unsigned immediate, unsigned immediate,  T, unsigned immediate).  */
295 static enum arm_type_qualifiers
296 arm_mcrr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
297   = { qualifier_void, qualifier_unsigned_immediate,
298       qualifier_unsigned_immediate, qualifier_none,
299       qualifier_unsigned_immediate };
300 #define MCRR_QUALIFIERS \
301   (arm_mcrr_qualifiers)
302 
303 /* T (unsigned immediate, unsigned immediate, unsigned immediate).  */
304 static enum arm_type_qualifiers
305 arm_mrrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
306   = { qualifier_none, qualifier_unsigned_immediate,
307       qualifier_unsigned_immediate, qualifier_unsigned_immediate };
308 #define MRRC_QUALIFIERS \
309   (arm_mrrc_qualifiers)
310 
311 /* T (immediate, unsigned immediate).  */
312 static enum arm_type_qualifiers
313 arm_cx_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
314   = { qualifier_none, qualifier_immediate, qualifier_unsigned_immediate };
315 #define CX_IMM_QUALIFIERS (arm_cx_imm_qualifiers)
316 
317 /* T (immediate, T, unsigned immediate).  */
318 static enum arm_type_qualifiers
319 arm_cx_unary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
320   = { qualifier_none, qualifier_immediate, qualifier_none,
321       qualifier_unsigned_immediate };
322 #define CX_UNARY_QUALIFIERS (arm_cx_unary_qualifiers)
323 
324 /* T (immediate, T, T, unsigned immediate).  */
325 static enum arm_type_qualifiers
326 arm_cx_binary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
327   = { qualifier_none, qualifier_immediate,
328       qualifier_none, qualifier_none,
329       qualifier_unsigned_immediate };
330 #define CX_BINARY_QUALIFIERS (arm_cx_binary_qualifiers)
331 
332 /* T (immediate, T, T, T, unsigned immediate).  */
333 static enum arm_type_qualifiers
334 arm_cx_ternary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
335   = { qualifier_none, qualifier_immediate,
336       qualifier_none, qualifier_none, qualifier_none,
337       qualifier_unsigned_immediate };
338 #define CX_TERNARY_QUALIFIERS (arm_cx_ternary_qualifiers)
339 
340 /* T (immediate, T, unsigned immediate, unsigned T).  */
341 static enum arm_type_qualifiers
342 arm_cx_unary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
343   = { qualifier_none, qualifier_immediate, qualifier_none,
344       qualifier_unsigned_immediate,
345       qualifier_unsigned };
346 #define CX_UNARY_UNONE_QUALIFIERS (arm_cx_unary_unone_qualifiers)
347 
348 /* T (immediate, T, T, unsigned immediate, unsigned T).  */
349 static enum arm_type_qualifiers
350 arm_cx_binary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
351   = { qualifier_none, qualifier_immediate,
352       qualifier_none, qualifier_none,
353       qualifier_unsigned_immediate,
354       qualifier_unsigned };
355 #define CX_BINARY_UNONE_QUALIFIERS (arm_cx_binary_unone_qualifiers)
356 
357 /* T (immediate, T, T, T, unsigned immediate, unsigned T).  */
358 static enum arm_type_qualifiers
359 arm_cx_ternary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
360   = { qualifier_none, qualifier_immediate,
361       qualifier_none, qualifier_none, qualifier_none,
362       qualifier_unsigned_immediate,
363       qualifier_unsigned };
364 #define CX_TERNARY_UNONE_QUALIFIERS (arm_cx_ternary_unone_qualifiers)
365 
366 /* The first argument (return type) of a store should be void type,
367    which we represent with qualifier_void.  Their first operand will be
368    a DImode pointer to the location to store to, so we must use
369    qualifier_map_mode | qualifier_pointer to build a pointer to the
370    element type of the vector.
371 
372    void ([T element type] *, T).  */
373 static enum arm_type_qualifiers
374 arm_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
375   = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
376 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
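
/* As a minimal sketch of how the scheme above plays out: for a vst1-style
   store builtin whose "key" mode is V8QImode, these qualifiers produce a
   builtin returning void whose first argument is typed as a pointer to the
   QImode element type and whose second argument is the V8QImode vector
   itself, rather than exposing the raw DImode pointer used by the RTL
   pattern.  (The concrete entries live in arm_neon_builtins.def.)  */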
377 
378 /* Qualifiers for MVE builtins.  */
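/* The names of the MVE qualifier macros below encode, left to right, the
   return type followed by the argument types: NONE/SNONE for plain (signed)
   values, UNONE for unsigned values and IMM for immediates.  For example,
   BINOP_NONE_UNONE_IMM below corresponds to T (unsigned T, immediate), as
   can be read off its qualifier array.  */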
379 
380 static enum arm_type_qualifiers
381 arm_unop_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
382   = { qualifier_none, qualifier_none };
383 #define UNOP_NONE_NONE_QUALIFIERS \
384   (arm_unop_none_none_qualifiers)
385 
386 static enum arm_type_qualifiers
387 arm_unop_none_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
388   = { qualifier_none, qualifier_none };
389 #define UNOP_NONE_SNONE_QUALIFIERS \
390   (arm_unop_none_snone_qualifiers)
391 
392 static enum arm_type_qualifiers
393 arm_unop_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
394   = { qualifier_none, qualifier_unsigned };
395 #define UNOP_NONE_UNONE_QUALIFIERS \
396   (arm_unop_none_unone_qualifiers)
397 
398 static enum arm_type_qualifiers
399 arm_unop_snone_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
400   = { qualifier_none, qualifier_none };
401 #define UNOP_SNONE_SNONE_QUALIFIERS \
402   (arm_unop_snone_snone_qualifiers)
403 
404 static enum arm_type_qualifiers
405 arm_unop_snone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
406   = { qualifier_none, qualifier_none };
407 #define UNOP_SNONE_NONE_QUALIFIERS \
408   (arm_unop_snone_none_qualifiers)
409 
410 static enum arm_type_qualifiers
411 arm_unop_snone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
412   = { qualifier_none, qualifier_immediate };
413 #define UNOP_SNONE_IMM_QUALIFIERS \
414   (arm_unop_snone_imm_qualifiers)
415 
416 static enum arm_type_qualifiers
417 arm_unop_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
418   = { qualifier_unsigned, qualifier_none };
419 #define UNOP_UNONE_NONE_QUALIFIERS \
420   (arm_unop_unone_none_qualifiers)
421 
422 static enum arm_type_qualifiers
423 arm_unop_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
424   = { qualifier_unsigned, qualifier_unsigned };
425 #define UNOP_UNONE_UNONE_QUALIFIERS \
426   (arm_unop_unone_unone_qualifiers)
427 
428 static enum arm_type_qualifiers
429 arm_unop_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
430   = { qualifier_unsigned, qualifier_immediate };
431 #define UNOP_UNONE_IMM_QUALIFIERS \
432   (arm_unop_unone_imm_qualifiers)
433 
434 static enum arm_type_qualifiers
435 arm_binop_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
436   = { qualifier_none, qualifier_none, qualifier_none };
437 #define BINOP_NONE_NONE_NONE_QUALIFIERS \
438   (arm_binop_none_none_none_qualifiers)
439 
440 static enum arm_type_qualifiers
441 arm_binop_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
442   = { qualifier_none, qualifier_none, qualifier_immediate };
443 #define BINOP_NONE_NONE_IMM_QUALIFIERS \
444   (arm_binop_none_none_imm_qualifiers)
445 
446 static enum arm_type_qualifiers
447 arm_binop_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
448   = { qualifier_none, qualifier_unsigned, qualifier_immediate };
449 #define BINOP_NONE_UNONE_IMM_QUALIFIERS \
450   (arm_binop_none_unone_imm_qualifiers)
451 
452 static enum arm_type_qualifiers
453 arm_binop_none_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
454   = { qualifier_none, qualifier_unsigned, qualifier_unsigned };
455 #define BINOP_NONE_UNONE_UNONE_QUALIFIERS \
456   (arm_binop_none_unone_unone_qualifiers)
457 
458 static enum arm_type_qualifiers
459 arm_binop_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
460   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
461 #define BINOP_UNONE_UNONE_IMM_QUALIFIERS \
462   (arm_binop_unone_unone_imm_qualifiers)
463 
464 static enum arm_type_qualifiers
465 arm_binop_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
466   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
467 #define BINOP_UNONE_UNONE_UNONE_QUALIFIERS \
468   (arm_binop_unone_unone_unone_qualifiers)
469 
470 static enum arm_type_qualifiers
471 arm_binop_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
472   = { qualifier_unsigned, qualifier_none, qualifier_immediate };
473 #define BINOP_UNONE_NONE_IMM_QUALIFIERS \
474   (arm_binop_unone_none_imm_qualifiers)
475 
476 static enum arm_type_qualifiers
477 arm_binop_none_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
478   = { qualifier_none, qualifier_none, qualifier_unsigned };
479 #define BINOP_NONE_NONE_UNONE_QUALIFIERS \
480   (arm_binop_none_none_unone_qualifiers)
481 
482 static enum arm_type_qualifiers
483 arm_binop_unone_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
484   = { qualifier_unsigned, qualifier_none, qualifier_none };
485 #define BINOP_UNONE_NONE_NONE_QUALIFIERS \
486   (arm_binop_unone_none_none_qualifiers)
487 
488 static enum arm_type_qualifiers
489 arm_binop_unone_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
490   = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
491 #define BINOP_UNONE_UNONE_NONE_QUALIFIERS \
492   (arm_binop_unone_unone_none_qualifiers)
493 
494 static enum arm_type_qualifiers
495 arm_ternop_unone_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
496   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
497     qualifier_immediate };
498 #define TERNOP_UNONE_UNONE_UNONE_IMM_QUALIFIERS \
499   (arm_ternop_unone_unone_unone_imm_qualifiers)
500 
501 static enum arm_type_qualifiers
502 arm_ternop_unone_unone_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
503   = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none };
504 #define TERNOP_UNONE_UNONE_NONE_NONE_QUALIFIERS \
505   (arm_ternop_unone_unone_none_none_qualifiers)
506 
507 static enum arm_type_qualifiers
508 arm_ternop_unone_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
509   = { qualifier_unsigned, qualifier_none, qualifier_unsigned,
510       qualifier_immediate };
511 #define TERNOP_UNONE_NONE_UNONE_IMM_QUALIFIERS \
512   (arm_ternop_unone_none_unone_imm_qualifiers)
513 
514 static enum arm_type_qualifiers
515 arm_ternop_none_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
516   = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_immediate };
517 #define TERNOP_NONE_NONE_UNONE_IMM_QUALIFIERS \
518   (arm_ternop_none_none_unone_imm_qualifiers)
519 
520 static enum arm_type_qualifiers
521 arm_ternop_unone_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
522   = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
523     qualifier_immediate };
524 #define TERNOP_UNONE_UNONE_NONE_IMM_QUALIFIERS \
525   (arm_ternop_unone_unone_none_imm_qualifiers)
526 
527 static enum arm_type_qualifiers
528 arm_ternop_unone_unone_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
529   = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
530       qualifier_unsigned };
531 #define TERNOP_UNONE_UNONE_NONE_UNONE_QUALIFIERS \
532   (arm_ternop_unone_unone_none_unone_qualifiers)
533 
534 static enum arm_type_qualifiers
535 arm_ternop_unone_unone_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
536   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
537     qualifier_unsigned };
538 #define TERNOP_UNONE_UNONE_IMM_UNONE_QUALIFIERS \
539   (arm_ternop_unone_unone_imm_unone_qualifiers)
540 
541 static enum arm_type_qualifiers
542 arm_ternop_unone_none_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
543   = { qualifier_unsigned, qualifier_none, qualifier_none, qualifier_unsigned };
544 #define TERNOP_UNONE_NONE_NONE_UNONE_QUALIFIERS \
545   (arm_ternop_unone_none_none_unone_qualifiers)
546 
547 static enum arm_type_qualifiers
548 arm_ternop_none_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
549   = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
550 #define TERNOP_NONE_NONE_NONE_IMM_QUALIFIERS \
551   (arm_ternop_none_none_none_imm_qualifiers)
552 
553 static enum arm_type_qualifiers
554 arm_ternop_none_none_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
555   = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
556 #define TERNOP_NONE_NONE_NONE_UNONE_QUALIFIERS \
557   (arm_ternop_none_none_none_unone_qualifiers)
558 
559 static enum arm_type_qualifiers
560 arm_ternop_none_none_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
561   = { qualifier_none, qualifier_none, qualifier_immediate, qualifier_unsigned };
562 #define TERNOP_NONE_NONE_IMM_UNONE_QUALIFIERS \
563   (arm_ternop_none_none_imm_unone_qualifiers)
564 
565 static enum arm_type_qualifiers
566 arm_ternop_none_none_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
567   = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_unsigned };
568 #define TERNOP_NONE_NONE_UNONE_UNONE_QUALIFIERS \
569   (arm_ternop_none_none_unone_unone_qualifiers)
570 
571 static enum arm_type_qualifiers
572 arm_ternop_unone_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
573   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
574     qualifier_unsigned };
575 #define TERNOP_UNONE_UNONE_UNONE_UNONE_QUALIFIERS \
576   (arm_ternop_unone_unone_unone_unone_qualifiers)
577 
578 static enum arm_type_qualifiers
579 arm_ternop_none_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
580   = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
581 #define TERNOP_NONE_NONE_NONE_NONE_QUALIFIERS \
582   (arm_ternop_none_none_none_none_qualifiers)
583 
584 static enum arm_type_qualifiers
585 arm_quadop_unone_unone_none_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
586   = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none,
587     qualifier_unsigned };
588 #define QUADOP_UNONE_UNONE_NONE_NONE_UNONE_QUALIFIERS \
589   (arm_quadop_unone_unone_none_none_unone_qualifiers)
590 
591 static enum arm_type_qualifiers
592 arm_quadop_none_none_none_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
593   = { qualifier_none, qualifier_none, qualifier_none, qualifier_none,
594     qualifier_unsigned };
595 #define QUADOP_NONE_NONE_NONE_NONE_UNONE_QUALIFIERS \
596   (arm_quadop_none_none_none_none_unone_qualifiers)
597 
598 static enum arm_type_qualifiers
599 arm_quadop_none_none_none_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
600   = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate,
601     qualifier_unsigned };
602 #define QUADOP_NONE_NONE_NONE_IMM_UNONE_QUALIFIERS \
603   (arm_quadop_none_none_none_imm_unone_qualifiers)
604 
605 static enum arm_type_qualifiers
606 arm_quadop_unone_unone_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
607   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
608     qualifier_unsigned, qualifier_unsigned };
609 #define QUADOP_UNONE_UNONE_UNONE_UNONE_UNONE_QUALIFIERS \
610   (arm_quadop_unone_unone_unone_unone_unone_qualifiers)
611 
612 static enum arm_type_qualifiers
613 arm_quadop_unone_unone_none_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
614   = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
615     qualifier_immediate, qualifier_unsigned };
616 #define QUADOP_UNONE_UNONE_NONE_IMM_UNONE_QUALIFIERS \
617   (arm_quadop_unone_unone_none_imm_unone_qualifiers)
618 
619 static enum arm_type_qualifiers
620 arm_quadop_none_none_unone_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
621   = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_immediate,
622     qualifier_unsigned };
623 #define QUADOP_NONE_NONE_UNONE_IMM_UNONE_QUALIFIERS \
624   (arm_quadop_none_none_unone_imm_unone_qualifiers)
625 
626 static enum arm_type_qualifiers
627 arm_quadop_unone_unone_unone_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
628   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
629     qualifier_immediate, qualifier_unsigned };
630 #define QUADOP_UNONE_UNONE_UNONE_IMM_UNONE_QUALIFIERS \
631   (arm_quadop_unone_unone_unone_imm_unone_qualifiers)
632 
633 static enum arm_type_qualifiers
634 arm_quadop_unone_unone_unone_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
635   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
636     qualifier_none, qualifier_unsigned };
637 #define QUADOP_UNONE_UNONE_UNONE_NONE_UNONE_QUALIFIERS \
638   (arm_quadop_unone_unone_unone_none_unone_qualifiers)
639 
640 static enum arm_type_qualifiers
641 arm_strs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
642   = { qualifier_void, qualifier_pointer, qualifier_none };
643 #define STRS_QUALIFIERS (arm_strs_qualifiers)
644 
645 static enum arm_type_qualifiers
646 arm_stru_qualifiers[SIMD_MAX_BUILTIN_ARGS]
647   = { qualifier_void, qualifier_pointer, qualifier_unsigned };
648 #define STRU_QUALIFIERS (arm_stru_qualifiers)
649 
650 static enum arm_type_qualifiers
651 arm_strss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
652   = { qualifier_void, qualifier_pointer, qualifier_unsigned,
653       qualifier_none};
654 #define STRSS_QUALIFIERS (arm_strss_qualifiers)
655 
656 static enum arm_type_qualifiers
657 arm_strsu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
658   = { qualifier_void, qualifier_pointer, qualifier_unsigned,
659       qualifier_unsigned};
660 #define STRSU_QUALIFIERS (arm_strsu_qualifiers)
661 
662 static enum arm_type_qualifiers
663 arm_strsbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
664   = { qualifier_void, qualifier_unsigned, qualifier_immediate, qualifier_none};
665 #define STRSBS_QUALIFIERS (arm_strsbs_qualifiers)
666 
667 static enum arm_type_qualifiers
668 arm_strsbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
669   = { qualifier_void, qualifier_unsigned, qualifier_immediate,
670       qualifier_unsigned};
671 #define STRSBU_QUALIFIERS (arm_strsbu_qualifiers)
672 
673 static enum arm_type_qualifiers
674 arm_strs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
675   = { qualifier_void, qualifier_pointer, qualifier_none, qualifier_unsigned};
676 #define STRS_P_QUALIFIERS (arm_strs_p_qualifiers)
677 
678 static enum arm_type_qualifiers
679 arm_stru_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
680   = { qualifier_void, qualifier_pointer, qualifier_unsigned,
681       qualifier_unsigned};
682 #define STRU_P_QUALIFIERS (arm_stru_p_qualifiers)
683 
684 static enum arm_type_qualifiers
685 arm_strsu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
686   = { qualifier_void, qualifier_pointer, qualifier_unsigned,
687       qualifier_unsigned, qualifier_unsigned};
688 #define STRSU_P_QUALIFIERS (arm_strsu_p_qualifiers)
689 
690 static enum arm_type_qualifiers
691 arm_strss_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
692   = { qualifier_void, qualifier_pointer, qualifier_unsigned,
693       qualifier_none, qualifier_unsigned};
694 #define STRSS_P_QUALIFIERS (arm_strss_p_qualifiers)
695 
696 static enum arm_type_qualifiers
697 arm_strsbs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
698   = { qualifier_void, qualifier_unsigned, qualifier_immediate,
699       qualifier_none, qualifier_unsigned};
700 #define STRSBS_P_QUALIFIERS (arm_strsbs_p_qualifiers)
701 
702 static enum arm_type_qualifiers
703 arm_strsbu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
704   = { qualifier_void, qualifier_unsigned, qualifier_immediate,
705       qualifier_unsigned, qualifier_unsigned};
706 #define STRSBU_P_QUALIFIERS (arm_strsbu_p_qualifiers)
707 
708 static enum arm_type_qualifiers
709 arm_ldrgu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
710   = { qualifier_unsigned, qualifier_pointer, qualifier_unsigned};
711 #define LDRGU_QUALIFIERS (arm_ldrgu_qualifiers)
712 
713 static enum arm_type_qualifiers
714 arm_ldrgs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
715   = { qualifier_none, qualifier_pointer, qualifier_unsigned};
716 #define LDRGS_QUALIFIERS (arm_ldrgs_qualifiers)
717 
718 static enum arm_type_qualifiers
719 arm_ldrs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
720   = { qualifier_none, qualifier_pointer};
721 #define LDRS_QUALIFIERS (arm_ldrs_qualifiers)
722 
723 static enum arm_type_qualifiers
724 arm_ldru_qualifiers[SIMD_MAX_BUILTIN_ARGS]
725   = { qualifier_unsigned, qualifier_pointer};
726 #define LDRU_QUALIFIERS (arm_ldru_qualifiers)
727 
728 static enum arm_type_qualifiers
729 arm_ldrgbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
730   = { qualifier_none, qualifier_unsigned, qualifier_immediate};
731 #define LDRGBS_QUALIFIERS (arm_ldrgbs_qualifiers)
732 
733 static enum arm_type_qualifiers
734 arm_ldrgbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
735   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
736 #define LDRGBU_QUALIFIERS (arm_ldrgbu_qualifiers)
737 
738 static enum arm_type_qualifiers
739 arm_ldrgbs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
740   = { qualifier_none, qualifier_unsigned, qualifier_immediate,
741       qualifier_unsigned};
742 #define LDRGBS_Z_QUALIFIERS (arm_ldrgbs_z_qualifiers)
743 
744 static enum arm_type_qualifiers
745 arm_ldrgbu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
746   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
747       qualifier_unsigned};
748 #define LDRGBU_Z_QUALIFIERS (arm_ldrgbu_z_qualifiers)
749 
750 static enum arm_type_qualifiers
751 arm_ldrgs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
752   = { qualifier_none, qualifier_pointer, qualifier_unsigned,
753       qualifier_unsigned};
754 #define LDRGS_Z_QUALIFIERS (arm_ldrgs_z_qualifiers)
755 
756 static enum arm_type_qualifiers
757 arm_ldrgu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
758   = { qualifier_unsigned, qualifier_pointer, qualifier_unsigned,
759       qualifier_unsigned};
760 #define LDRGU_Z_QUALIFIERS (arm_ldrgu_z_qualifiers)
761 
762 static enum arm_type_qualifiers
763 arm_ldrs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
764   = { qualifier_none, qualifier_pointer, qualifier_unsigned};
765 #define LDRS_Z_QUALIFIERS (arm_ldrs_z_qualifiers)
766 
767 static enum arm_type_qualifiers
768 arm_ldru_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
769   = { qualifier_unsigned, qualifier_pointer, qualifier_unsigned};
770 #define LDRU_Z_QUALIFIERS (arm_ldru_z_qualifiers)
771 
772 static enum arm_type_qualifiers
773 arm_quinop_unone_unone_unone_unone_imm_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
774   = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
775       qualifier_unsigned, qualifier_immediate, qualifier_unsigned };
776 #define QUINOP_UNONE_UNONE_UNONE_UNONE_IMM_UNONE_QUALIFIERS \
777   (arm_quinop_unone_unone_unone_unone_imm_unone_qualifiers)
778 
779 static enum arm_type_qualifiers
780 arm_ldrgbwbxu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
781   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
782 #define LDRGBWBXU_QUALIFIERS (arm_ldrgbwbxu_qualifiers)
783 
784 static enum arm_type_qualifiers
785 arm_ldrgbwbxu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
786   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
787       qualifier_unsigned};
788 #define LDRGBWBXU_Z_QUALIFIERS (arm_ldrgbwbxu_z_qualifiers)
789 
790 static enum arm_type_qualifiers
791 arm_ldrgbwbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
792   = { qualifier_none, qualifier_unsigned, qualifier_immediate};
793 #define LDRGBWBS_QUALIFIERS (arm_ldrgbwbs_qualifiers)
794 
795 static enum arm_type_qualifiers
796 arm_ldrgbwbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
797   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
798 #define LDRGBWBU_QUALIFIERS (arm_ldrgbwbu_qualifiers)
799 
800 static enum arm_type_qualifiers
801 arm_ldrgbwbs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
802   = { qualifier_none, qualifier_unsigned, qualifier_immediate,
803       qualifier_unsigned};
804 #define LDRGBWBS_Z_QUALIFIERS (arm_ldrgbwbs_z_qualifiers)
805 
806 static enum arm_type_qualifiers
807 arm_ldrgbwbu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
808   = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
809       qualifier_unsigned};
810 #define LDRGBWBU_Z_QUALIFIERS (arm_ldrgbwbu_z_qualifiers)
811 
812 static enum arm_type_qualifiers
813 arm_strsbwbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
814   = { qualifier_unsigned, qualifier_unsigned, qualifier_const, qualifier_none};
815 #define STRSBWBS_QUALIFIERS (arm_strsbwbs_qualifiers)
816 
817 static enum arm_type_qualifiers
818 arm_strsbwbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
819   = { qualifier_unsigned, qualifier_unsigned, qualifier_const, qualifier_unsigned};
820 #define STRSBWBU_QUALIFIERS (arm_strsbwbu_qualifiers)
821 
822 static enum arm_type_qualifiers
823 arm_strsbwbs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
824   = { qualifier_unsigned, qualifier_unsigned, qualifier_const,
825       qualifier_none, qualifier_unsigned};
826 #define STRSBWBS_P_QUALIFIERS (arm_strsbwbs_p_qualifiers)
827 
828 static enum arm_type_qualifiers
829 arm_strsbwbu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
830   = { qualifier_unsigned, qualifier_unsigned, qualifier_const,
831       qualifier_unsigned, qualifier_unsigned};
832 #define STRSBWBU_P_QUALIFIERS (arm_strsbwbu_p_qualifiers)
833 
834 static enum arm_type_qualifiers
835 arm_lsll_qualifiers[SIMD_MAX_BUILTIN_ARGS]
836   = { qualifier_unsigned, qualifier_unsigned, qualifier_none};
837 #define LSLL_QUALIFIERS (arm_lsll_qualifiers)
838 
839 static enum arm_type_qualifiers
840 arm_uqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
841   = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
842 #define UQSHL_QUALIFIERS (arm_uqshl_qualifiers)
843 
844 static enum arm_type_qualifiers
845 arm_asrl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
846   = { qualifier_none, qualifier_none, qualifier_none};
847 #define ASRL_QUALIFIERS (arm_asrl_qualifiers)
848 
849 static enum arm_type_qualifiers
850 arm_sqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
851   = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
852 #define SQSHL_QUALIFIERS (arm_sqshl_qualifiers)
853 
854 /* End of Qualifiers for MVE builtins.  */
855 
856    /* void ([T element type] *, T, immediate).  */
857 static enum arm_type_qualifiers
858 arm_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
859   = { qualifier_void, qualifier_pointer_map_mode,
860       qualifier_none, qualifier_struct_load_store_lane_index };
861 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
862 
863    /* int (void).  */
864 static enum arm_type_qualifiers
865 arm_sat_occurred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
866   = { qualifier_none, qualifier_void };
867 #define SAT_OCCURRED_QUALIFIERS (arm_sat_occurred_qualifiers)
868 
869    /* void (int).  */
870 static enum arm_type_qualifiers
871 arm_set_sat_qualifiers[SIMD_MAX_BUILTIN_ARGS]
872   = { qualifier_void, qualifier_none };
873 #define SET_SAT_QUALIFIERS (arm_set_sat_qualifiers)
874 
875 #define v8qi_UP  E_V8QImode
876 #define v4hi_UP  E_V4HImode
877 #define v4hf_UP  E_V4HFmode
878 #define v4bf_UP  E_V4BFmode
879 #define v2si_UP  E_V2SImode
880 #define v2sf_UP  E_V2SFmode
881 #define v2bf_UP  E_V2BFmode
882 #define di_UP    E_DImode
883 #define v16qi_UP E_V16QImode
884 #define v8hi_UP  E_V8HImode
885 #define v8hf_UP  E_V8HFmode
886 #define v8bf_UP  E_V8BFmode
887 #define v4si_UP  E_V4SImode
888 #define v4sf_UP  E_V4SFmode
889 #define v2di_UP  E_V2DImode
890 #define ti_UP	 E_TImode
891 #define ei_UP	 E_EImode
892 #define oi_UP	 E_OImode
893 #define hf_UP	 E_HFmode
894 #define bf_UP    E_BFmode
895 #define si_UP	 E_SImode
896 #define hi_UP    E_HImode
897 #define void_UP	 E_VOIDmode
898 #define sf_UP	 E_SFmode
899 #define UP(X) X##_UP
900 
901 typedef struct {
902   const char *name;
903   machine_mode mode;
904   const enum insn_code code;
905   unsigned int fcode;
906   enum arm_type_qualifiers *qualifiers;
907 } arm_builtin_datum;
908 
909 #define CF(N,X) CODE_FOR_neon_##N##X
910 
911 #define VAR1(T, N, A) \
912   {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
913 #define VAR2(T, N, A, B) \
914   VAR1 (T, N, A) \
915   VAR1 (T, N, B)
916 #define VAR3(T, N, A, B, C) \
917   VAR2 (T, N, A, B) \
918   VAR1 (T, N, C)
919 #define VAR4(T, N, A, B, C, D) \
920   VAR3 (T, N, A, B, C) \
921   VAR1 (T, N, D)
922 #define VAR5(T, N, A, B, C, D, E) \
923   VAR4 (T, N, A, B, C, D) \
924   VAR1 (T, N, E)
925 #define VAR6(T, N, A, B, C, D, E, F) \
926   VAR5 (T, N, A, B, C, D, E) \
927   VAR1 (T, N, F)
928 #define VAR7(T, N, A, B, C, D, E, F, G) \
929   VAR6 (T, N, A, B, C, D, E, F) \
930   VAR1 (T, N, G)
931 #define VAR8(T, N, A, B, C, D, E, F, G, H) \
932   VAR7 (T, N, A, B, C, D, E, F, G) \
933   VAR1 (T, N, H)
934 #define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
935   VAR8 (T, N, A, B, C, D, E, F, G, H) \
936   VAR1 (T, N, I)
937 #define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
938   VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
939   VAR1 (T, N, J)
940 #define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
941   VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
942   VAR1 (T, N, K)
943 #define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
944   VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
945   VAR1 (T, N, L)
946 #define VAR13(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
947   VAR12 (T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
948   VAR1 (T, N, M)
949 #define VAR14(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M, O) \
950   VAR13 (T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
951   VAR1 (T, N, O)
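
/* As an illustration of the VARn/CF machinery (using a made-up builtin name,
   not necessarily an entry in the .def files), a line such as

     VAR2 (BINOP, vfoo, v8qi, v4hi)

   in arm_neon_builtins.def expands to two table entries:

     {"vfoov8qi", E_V8QImode, CODE_FOR_neon_vfoov8qi, 0, BINOP_QUALIFIERS},
     {"vfoov4hi", E_V4HImode, CODE_FOR_neon_vfoov4hi, 0, BINOP_QUALIFIERS},

   each recording the builtin name, its "key" machine mode, the insn code of
   the matching named pattern and the qualifier list used to build its
   type.  */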
952 
953 /* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
954    and arm_acle_builtins.def.  The entries in arm_neon_builtins.def require
955    TARGET_NEON to be true.  The feature tests are checked when the builtins are
956    expanded.
957 
958    The mode entries in the following table correspond to the "key" type of the
959    instruction variant, i.e. equivalent to that which would be specified after
960    the assembler mnemonic for neon instructions, which usually refers to the
961    last vector operand.  The modes listed per instruction should be the same as
962    those defined for that instruction's pattern, for instance in neon.md.  */
963 
964 static arm_builtin_datum vfp_builtin_data[] =
965 {
966 #include "arm_vfp_builtins.def"
967 };
968 
969 static arm_builtin_datum neon_builtin_data[] =
970 {
971 #include "arm_neon_builtins.def"
972 };
973 
974 #undef CF
975 #define CF(N,X) CODE_FOR_mve_##N##X
976 static arm_builtin_datum mve_builtin_data[] =
977 {
978 #include "arm_mve_builtins.def"
979 };
980 
981 #undef CF
982 #undef VAR1
983 #define VAR1(T, N, A) \
984   {#N, UP (A), CODE_FOR_arm_##N, 0, T##_QUALIFIERS},
985 
986 static arm_builtin_datum acle_builtin_data[] =
987 {
988 #include "arm_acle_builtins.def"
989 };
990 
991 #undef VAR1
992 /* IMM_MAX sets the maximum valid value of the CDE immediate operand.
993    ECF_FLAG sets the flag used for set_call_expr_flags.  */
994 #define VAR1(T, N, A, IMM_MAX, ECF_FLAG) \
995   {{#N #A, UP (A), CODE_FOR_arm_##N##A, 0, T##_QUALIFIERS}, IMM_MAX, ECF_FLAG},
996 
997 typedef struct {
998   arm_builtin_datum base;
999   unsigned int imm_max;
1000   int ecf_flag;
1001 } arm_builtin_cde_datum;
1002 
1003 static arm_builtin_cde_datum cde_builtin_data[] =
1004 {
1005 #include "arm_cde_builtins.def"
1006 };
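
/* For a hypothetical entry VAR1 (CX_UNARY, vcx1q, v16qi, 4095, 0) the macro
   above would expand to

     {{"vcx1qv16qi", E_V16QImode, CODE_FOR_arm_vcx1qv16qi, 0,
       CX_UNARY_QUALIFIERS}, 4095, 0},

   i.e. an ordinary arm_builtin_datum wrapped together with the maximum
   accepted immediate and the extra call-expression flags.  */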
1007 
1008 #undef VAR1
1009 #define VAR1(T, N, X) \
1010   ARM_BUILTIN_NEON_##N##X,
1011 
1012 enum arm_builtins
1013 {
1014   ARM_BUILTIN_GETWCGR0,
1015   ARM_BUILTIN_GETWCGR1,
1016   ARM_BUILTIN_GETWCGR2,
1017   ARM_BUILTIN_GETWCGR3,
1018 
1019   ARM_BUILTIN_SETWCGR0,
1020   ARM_BUILTIN_SETWCGR1,
1021   ARM_BUILTIN_SETWCGR2,
1022   ARM_BUILTIN_SETWCGR3,
1023 
1024   ARM_BUILTIN_WZERO,
1025 
1026   ARM_BUILTIN_WAVG2BR,
1027   ARM_BUILTIN_WAVG2HR,
1028   ARM_BUILTIN_WAVG2B,
1029   ARM_BUILTIN_WAVG2H,
1030 
1031   ARM_BUILTIN_WACCB,
1032   ARM_BUILTIN_WACCH,
1033   ARM_BUILTIN_WACCW,
1034 
1035   ARM_BUILTIN_WMACS,
1036   ARM_BUILTIN_WMACSZ,
1037   ARM_BUILTIN_WMACU,
1038   ARM_BUILTIN_WMACUZ,
1039 
1040   ARM_BUILTIN_WSADB,
1041   ARM_BUILTIN_WSADBZ,
1042   ARM_BUILTIN_WSADH,
1043   ARM_BUILTIN_WSADHZ,
1044 
1045   ARM_BUILTIN_WALIGNI,
1046   ARM_BUILTIN_WALIGNR0,
1047   ARM_BUILTIN_WALIGNR1,
1048   ARM_BUILTIN_WALIGNR2,
1049   ARM_BUILTIN_WALIGNR3,
1050 
1051   ARM_BUILTIN_TMIA,
1052   ARM_BUILTIN_TMIAPH,
1053   ARM_BUILTIN_TMIABB,
1054   ARM_BUILTIN_TMIABT,
1055   ARM_BUILTIN_TMIATB,
1056   ARM_BUILTIN_TMIATT,
1057 
1058   ARM_BUILTIN_TMOVMSKB,
1059   ARM_BUILTIN_TMOVMSKH,
1060   ARM_BUILTIN_TMOVMSKW,
1061 
1062   ARM_BUILTIN_TBCSTB,
1063   ARM_BUILTIN_TBCSTH,
1064   ARM_BUILTIN_TBCSTW,
1065 
1066   ARM_BUILTIN_WMADDS,
1067   ARM_BUILTIN_WMADDU,
1068 
1069   ARM_BUILTIN_WPACKHSS,
1070   ARM_BUILTIN_WPACKWSS,
1071   ARM_BUILTIN_WPACKDSS,
1072   ARM_BUILTIN_WPACKHUS,
1073   ARM_BUILTIN_WPACKWUS,
1074   ARM_BUILTIN_WPACKDUS,
1075 
1076   ARM_BUILTIN_WADDB,
1077   ARM_BUILTIN_WADDH,
1078   ARM_BUILTIN_WADDW,
1079   ARM_BUILTIN_WADDSSB,
1080   ARM_BUILTIN_WADDSSH,
1081   ARM_BUILTIN_WADDSSW,
1082   ARM_BUILTIN_WADDUSB,
1083   ARM_BUILTIN_WADDUSH,
1084   ARM_BUILTIN_WADDUSW,
1085   ARM_BUILTIN_WSUBB,
1086   ARM_BUILTIN_WSUBH,
1087   ARM_BUILTIN_WSUBW,
1088   ARM_BUILTIN_WSUBSSB,
1089   ARM_BUILTIN_WSUBSSH,
1090   ARM_BUILTIN_WSUBSSW,
1091   ARM_BUILTIN_WSUBUSB,
1092   ARM_BUILTIN_WSUBUSH,
1093   ARM_BUILTIN_WSUBUSW,
1094 
1095   ARM_BUILTIN_WAND,
1096   ARM_BUILTIN_WANDN,
1097   ARM_BUILTIN_WOR,
1098   ARM_BUILTIN_WXOR,
1099 
1100   ARM_BUILTIN_WCMPEQB,
1101   ARM_BUILTIN_WCMPEQH,
1102   ARM_BUILTIN_WCMPEQW,
1103   ARM_BUILTIN_WCMPGTUB,
1104   ARM_BUILTIN_WCMPGTUH,
1105   ARM_BUILTIN_WCMPGTUW,
1106   ARM_BUILTIN_WCMPGTSB,
1107   ARM_BUILTIN_WCMPGTSH,
1108   ARM_BUILTIN_WCMPGTSW,
1109 
1110   ARM_BUILTIN_TEXTRMSB,
1111   ARM_BUILTIN_TEXTRMSH,
1112   ARM_BUILTIN_TEXTRMSW,
1113   ARM_BUILTIN_TEXTRMUB,
1114   ARM_BUILTIN_TEXTRMUH,
1115   ARM_BUILTIN_TEXTRMUW,
1116   ARM_BUILTIN_TINSRB,
1117   ARM_BUILTIN_TINSRH,
1118   ARM_BUILTIN_TINSRW,
1119 
1120   ARM_BUILTIN_WMAXSW,
1121   ARM_BUILTIN_WMAXSH,
1122   ARM_BUILTIN_WMAXSB,
1123   ARM_BUILTIN_WMAXUW,
1124   ARM_BUILTIN_WMAXUH,
1125   ARM_BUILTIN_WMAXUB,
1126   ARM_BUILTIN_WMINSW,
1127   ARM_BUILTIN_WMINSH,
1128   ARM_BUILTIN_WMINSB,
1129   ARM_BUILTIN_WMINUW,
1130   ARM_BUILTIN_WMINUH,
1131   ARM_BUILTIN_WMINUB,
1132 
1133   ARM_BUILTIN_WMULUM,
1134   ARM_BUILTIN_WMULSM,
1135   ARM_BUILTIN_WMULUL,
1136 
1137   ARM_BUILTIN_PSADBH,
1138   ARM_BUILTIN_WSHUFH,
1139 
1140   ARM_BUILTIN_WSLLH,
1141   ARM_BUILTIN_WSLLW,
1142   ARM_BUILTIN_WSLLD,
1143   ARM_BUILTIN_WSRAH,
1144   ARM_BUILTIN_WSRAW,
1145   ARM_BUILTIN_WSRAD,
1146   ARM_BUILTIN_WSRLH,
1147   ARM_BUILTIN_WSRLW,
1148   ARM_BUILTIN_WSRLD,
1149   ARM_BUILTIN_WRORH,
1150   ARM_BUILTIN_WRORW,
1151   ARM_BUILTIN_WRORD,
1152   ARM_BUILTIN_WSLLHI,
1153   ARM_BUILTIN_WSLLWI,
1154   ARM_BUILTIN_WSLLDI,
1155   ARM_BUILTIN_WSRAHI,
1156   ARM_BUILTIN_WSRAWI,
1157   ARM_BUILTIN_WSRADI,
1158   ARM_BUILTIN_WSRLHI,
1159   ARM_BUILTIN_WSRLWI,
1160   ARM_BUILTIN_WSRLDI,
1161   ARM_BUILTIN_WRORHI,
1162   ARM_BUILTIN_WRORWI,
1163   ARM_BUILTIN_WRORDI,
1164 
1165   ARM_BUILTIN_WUNPCKIHB,
1166   ARM_BUILTIN_WUNPCKIHH,
1167   ARM_BUILTIN_WUNPCKIHW,
1168   ARM_BUILTIN_WUNPCKILB,
1169   ARM_BUILTIN_WUNPCKILH,
1170   ARM_BUILTIN_WUNPCKILW,
1171 
1172   ARM_BUILTIN_WUNPCKEHSB,
1173   ARM_BUILTIN_WUNPCKEHSH,
1174   ARM_BUILTIN_WUNPCKEHSW,
1175   ARM_BUILTIN_WUNPCKEHUB,
1176   ARM_BUILTIN_WUNPCKEHUH,
1177   ARM_BUILTIN_WUNPCKEHUW,
1178   ARM_BUILTIN_WUNPCKELSB,
1179   ARM_BUILTIN_WUNPCKELSH,
1180   ARM_BUILTIN_WUNPCKELSW,
1181   ARM_BUILTIN_WUNPCKELUB,
1182   ARM_BUILTIN_WUNPCKELUH,
1183   ARM_BUILTIN_WUNPCKELUW,
1184 
1185   ARM_BUILTIN_WABSB,
1186   ARM_BUILTIN_WABSH,
1187   ARM_BUILTIN_WABSW,
1188 
1189   ARM_BUILTIN_WADDSUBHX,
1190   ARM_BUILTIN_WSUBADDHX,
1191 
1192   ARM_BUILTIN_WABSDIFFB,
1193   ARM_BUILTIN_WABSDIFFH,
1194   ARM_BUILTIN_WABSDIFFW,
1195 
1196   ARM_BUILTIN_WADDCH,
1197   ARM_BUILTIN_WADDCW,
1198 
1199   ARM_BUILTIN_WAVG4,
1200   ARM_BUILTIN_WAVG4R,
1201 
1202   ARM_BUILTIN_WMADDSX,
1203   ARM_BUILTIN_WMADDUX,
1204 
1205   ARM_BUILTIN_WMADDSN,
1206   ARM_BUILTIN_WMADDUN,
1207 
1208   ARM_BUILTIN_WMULWSM,
1209   ARM_BUILTIN_WMULWUM,
1210 
1211   ARM_BUILTIN_WMULWSMR,
1212   ARM_BUILTIN_WMULWUMR,
1213 
1214   ARM_BUILTIN_WMULWL,
1215 
1216   ARM_BUILTIN_WMULSMR,
1217   ARM_BUILTIN_WMULUMR,
1218 
1219   ARM_BUILTIN_WQMULM,
1220   ARM_BUILTIN_WQMULMR,
1221 
1222   ARM_BUILTIN_WQMULWM,
1223   ARM_BUILTIN_WQMULWMR,
1224 
1225   ARM_BUILTIN_WADDBHUSM,
1226   ARM_BUILTIN_WADDBHUSL,
1227 
1228   ARM_BUILTIN_WQMIABB,
1229   ARM_BUILTIN_WQMIABT,
1230   ARM_BUILTIN_WQMIATB,
1231   ARM_BUILTIN_WQMIATT,
1232 
1233   ARM_BUILTIN_WQMIABBN,
1234   ARM_BUILTIN_WQMIABTN,
1235   ARM_BUILTIN_WQMIATBN,
1236   ARM_BUILTIN_WQMIATTN,
1237 
1238   ARM_BUILTIN_WMIABB,
1239   ARM_BUILTIN_WMIABT,
1240   ARM_BUILTIN_WMIATB,
1241   ARM_BUILTIN_WMIATT,
1242 
1243   ARM_BUILTIN_WMIABBN,
1244   ARM_BUILTIN_WMIABTN,
1245   ARM_BUILTIN_WMIATBN,
1246   ARM_BUILTIN_WMIATTN,
1247 
1248   ARM_BUILTIN_WMIAWBB,
1249   ARM_BUILTIN_WMIAWBT,
1250   ARM_BUILTIN_WMIAWTB,
1251   ARM_BUILTIN_WMIAWTT,
1252 
1253   ARM_BUILTIN_WMIAWBBN,
1254   ARM_BUILTIN_WMIAWBTN,
1255   ARM_BUILTIN_WMIAWTBN,
1256   ARM_BUILTIN_WMIAWTTN,
1257 
1258   ARM_BUILTIN_WMERGE,
1259 
1260   ARM_BUILTIN_GET_FPSCR,
1261   ARM_BUILTIN_SET_FPSCR,
1262   ARM_BUILTIN_GET_FPSCR_NZCVQC,
1263   ARM_BUILTIN_SET_FPSCR_NZCVQC,
1264 
1265   ARM_BUILTIN_CMSE_NONSECURE_CALLER,
1266   ARM_BUILTIN_SIMD_LANE_CHECK,
1267 
1268 #undef CRYPTO1
1269 #undef CRYPTO2
1270 #undef CRYPTO3
1271 
1272 #define CRYPTO1(L, U, M1, M2) \
1273   ARM_BUILTIN_CRYPTO_##U,
1274 #define CRYPTO2(L, U, M1, M2, M3) \
1275   ARM_BUILTIN_CRYPTO_##U,
1276 #define CRYPTO3(L, U, M1, M2, M3, M4) \
1277   ARM_BUILTIN_CRYPTO_##U,
1278 
1279   ARM_BUILTIN_CRYPTO_BASE,
1280 
1281 #include "crypto.def"
1282 
1283 #undef CRYPTO1
1284 #undef CRYPTO2
1285 #undef CRYPTO3
1286 
1287   ARM_BUILTIN_VFP_BASE,
1288 
1289 #include "arm_vfp_builtins.def"
1290 
1291   ARM_BUILTIN_NEON_BASE,
1292 
1293 #include "arm_neon_builtins.def"
1294 
1295 #undef VAR1
1296 #define VAR1(T, N, X) \
1297   ARM_BUILTIN_##N,
1298 
1299   ARM_BUILTIN_ACLE_BASE,
1300   ARM_BUILTIN_SAT_IMM_CHECK = ARM_BUILTIN_ACLE_BASE,
1301 
1302 #include "arm_acle_builtins.def"
1303 
1304 #undef VAR1
1305 #define VAR1(T, N, X, ... ) \
1306   ARM_BUILTIN_##N##X,
1307 
1308   ARM_BUILTIN_CDE_BASE,
1309 
1310 #include "arm_cde_builtins.def"
1311 
1312   ARM_BUILTIN_MVE_BASE,
1313 
1314 #undef VAR1
1315 #define VAR1(T, N, X) \
1316   ARM_BUILTIN_MVE_##N##X,
1317 #include "arm_mve_builtins.def"
1318 
1319   ARM_BUILTIN_MAX
1320 };
1321 
1322 #define ARM_BUILTIN_VFP_PATTERN_START \
1323   (ARM_BUILTIN_VFP_BASE + 1)
1324 
1325 #define ARM_BUILTIN_NEON_PATTERN_START \
1326   (ARM_BUILTIN_NEON_BASE + 1)
1327 
1328 #define ARM_BUILTIN_MVE_PATTERN_START \
1329   (ARM_BUILTIN_MVE_BASE + 1)
1330 
1331 #define ARM_BUILTIN_ACLE_PATTERN_START \
1332   (ARM_BUILTIN_ACLE_BASE + 1)
1333 
1334 #define ARM_BUILTIN_CDE_PATTERN_START \
1335   (ARM_BUILTIN_CDE_BASE + 1)
1336 
1337 #define ARM_BUILTIN_CDE_PATTERN_END \
1338   (ARM_BUILTIN_CDE_BASE + ARRAY_SIZE (cde_builtin_data))
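
/* The *_PATTERN_START values above are what the expanders subtract from a
   builtin's function code to index back into the matching data table, along
   the lines of (sketch for the Neon case):

     arm_builtin_datum *d
       = &neon_builtin_data[fcode - ARM_BUILTIN_NEON_PATTERN_START];  */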
1339 
1340 #undef CF
1341 #undef VAR1
1342 #undef VAR2
1343 #undef VAR3
1344 #undef VAR4
1345 #undef VAR5
1346 #undef VAR6
1347 #undef VAR7
1348 #undef VAR8
1349 #undef VAR9
1350 #undef VAR10
1351 
1352 static GTY(()) tree arm_builtin_decls[ARM_BUILTIN_MAX];
1353 
1354 #define NUM_DREG_TYPES 5
1355 #define NUM_QREG_TYPES 6
1356 
1357 /* Internal scalar builtin types.  These types are used to support
1358    neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
1359    the mangling for these types is implementation-defined.  */
1360 const char *arm_scalar_builtin_types[] = {
1361   "__builtin_neon_qi",
1362   "__builtin_neon_hi",
1363   "__builtin_neon_si",
1364   "__builtin_neon_sf",
1365   "__builtin_neon_di",
1366   "__builtin_neon_df",
1367   "__builtin_neon_ti",
1368   "__builtin_neon_uqi",
1369   "__builtin_neon_uhi",
1370   "__builtin_neon_usi",
1371   "__builtin_neon_udi",
1372   "__builtin_neon_ei",
1373   "__builtin_neon_oi",
1374   "__builtin_neon_ci",
1375   "__builtin_neon_xi",
1376   "__builtin_neon_bf",
1377   NULL
1378 };
1379 
1380 #define ENTRY(E, M, Q, S, T, G) E,
1381 enum arm_simd_type
1382 {
1383 #include "arm-simd-builtin-types.def"
1384   __TYPE_FINAL
1385 };
1386 #undef ENTRY
1387 
1388 struct arm_simd_type_info
1389 {
1390   enum arm_simd_type type;
1391 
1392   /* Internal type name.  */
1393   const char *name;
1394 
1395   /* Internal type name (mangled).  The mangled names conform to the
1396      AAPCS (see "Procedure Call Standard for the ARM Architecture",
1397      Appendix A).  To qualify for emission with the mangled names defined in
1398      that document, a vector type must not only be of the correct mode but also
1399      be of the correct internal Neon vector type (e.g. __simd64_int8_t);
1400      these types are registered by arm_init_simd_builtin_types ().  In other
1401      words, vector types defined in other ways, e.g. via the vector_size attribute,
1402      will get default mangled names.  */
1403   const char *mangle;
1404 
1405   /* Internal type.  */
1406   tree itype;
1407 
1408   /* Element type.  */
1409   tree eltype;
1410 
1411   /* Machine mode the internal type maps to.  */
1412   machine_mode mode;
1413 
1414   /* Qualifiers.  */
1415   enum arm_type_qualifiers q;
1416 };
1417 
1418 #define ENTRY(E, M, Q, S, T, G)		\
1419   {E,					\
1420    "__simd" #S "_" #T "_t",		\
1421    #G "__simd" #S "_" #T "_t",		\
1422    NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
1423 static struct arm_simd_type_info arm_simd_types [] = {
1424 #include "arm-simd-builtin-types.def"
1425 };
1426 #undef ENTRY
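
/* For example, an entry along the lines of

     ENTRY (Int8x8_t, V8QI, none, 64, int8, 15)

   in arm-simd-builtin-types.def becomes

     {Int8x8_t, "__simd64_int8_t", "15__simd64_int8_t",
      NULL_TREE, NULL_TREE, V8QImode, qualifier_none},

   with the itype and eltype fields starting out as NULL_TREE and being
   filled in by arm_init_simd_builtin_types.  (The exact arguments here are
   illustrative; see the .def file for the real entries.)  */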
1427 
1428 /* The user-visible __fp16 type.  */
1429 tree arm_fp16_type_node = NULL_TREE;
1430 
1431 /* Back-end node type for brain float (bfloat) types.  */
1432 tree arm_bf16_type_node = NULL_TREE;
1433 tree arm_bf16_ptr_type_node = NULL_TREE;
1434 
1435 static tree arm_simd_intOI_type_node = NULL_TREE;
1436 static tree arm_simd_intEI_type_node = NULL_TREE;
1437 static tree arm_simd_intCI_type_node = NULL_TREE;
1438 static tree arm_simd_intXI_type_node = NULL_TREE;
1439 static tree arm_simd_polyQI_type_node = NULL_TREE;
1440 static tree arm_simd_polyHI_type_node = NULL_TREE;
1441 static tree arm_simd_polyDI_type_node = NULL_TREE;
1442 static tree arm_simd_polyTI_type_node = NULL_TREE;
1443 
1444 static const char *
1445 arm_mangle_builtin_scalar_type (const_tree type)
1446 {
1447   int i = 0;
1448 
1449   while (arm_scalar_builtin_types[i] != NULL)
1450     {
1451       const char *name = arm_scalar_builtin_types[i];
1452 
1453       if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1454 	  && DECL_NAME (TYPE_NAME (type))
1455 	  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
1456 	return arm_scalar_builtin_types[i];
1457       i++;
1458     }
1459   return NULL;
1460 }
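
/* In other words, when the incoming type is one of the internal scalar types
   named in arm_scalar_builtin_types (say "__builtin_neon_uqi"), its own name
   string is returned as the mangled name, consistent with the mangling for
   these types being implementation-defined.  */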
1461 
1462 static const char *
1463 arm_mangle_builtin_vector_type (const_tree type)
1464 {
1465   int i;
1466   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
1467 
1468   for (i = 0; i < nelts; i++)
1469     if (arm_simd_types[i].mode ==  TYPE_MODE (type)
1470 	&& TYPE_NAME (type)
1471 	&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1472 	&& DECL_NAME (TYPE_NAME (type))
1473 	&& !strcmp
1474 	     (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
1475 	      arm_simd_types[i].name))
1476       return arm_simd_types[i].mangle;
1477 
1478   return NULL;
1479 }
1480 
1481 const char *
1482 arm_mangle_builtin_type (const_tree type)
1483 {
1484   const char *mangle;
1485   /* Walk through all the Arm builtin type tables to look up the
1486      incoming type.  */
1487   if ((mangle = arm_mangle_builtin_vector_type (type))
1488       || (mangle = arm_mangle_builtin_scalar_type (type)))
1489     return mangle;
1490 
1491   return NULL;
1492 }
1493 
1494 static tree
1495 arm_simd_builtin_std_type (machine_mode mode,
1496 			   enum arm_type_qualifiers q)
1497 {
1498 #define QUAL_TYPE(M)  \
1499   ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
1500   switch (mode)
1501     {
1502     case E_QImode:
1503       return QUAL_TYPE (QI);
1504     case E_HImode:
1505       return QUAL_TYPE (HI);
1506     case E_SImode:
1507       return QUAL_TYPE (SI);
1508     case E_DImode:
1509       return QUAL_TYPE (DI);
1510     case E_TImode:
1511       return QUAL_TYPE (TI);
1512     case E_OImode:
1513       return arm_simd_intOI_type_node;
1514     case E_EImode:
1515       return arm_simd_intEI_type_node;
1516     case E_CImode:
1517       return arm_simd_intCI_type_node;
1518     case E_XImode:
1519       return arm_simd_intXI_type_node;
1520     case E_HFmode:
1521       return arm_fp16_type_node;
1522     case E_SFmode:
1523       return float_type_node;
1524     case E_DFmode:
1525       return double_type_node;
1526     case E_BFmode:
1527       return arm_bf16_type_node;
1528     default:
1529       gcc_unreachable ();
1530     }
1531 #undef QUAL_TYPE
1532 }
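
/* For instance, arm_simd_builtin_std_type (E_SImode, qualifier_none) yields
   intSI_type_node while qualifier_unsigned yields unsigned_intSI_type_node;
   the floating-point and extra-wide integer modes ignore the qualifier and
   always return the single node listed above.  */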
1533 
1534 static tree
1535 arm_lookup_simd_builtin_type (machine_mode mode,
1536 			      enum arm_type_qualifiers q)
1537 {
1538   int i;
1539   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
1540 
1541   /* Non-poly scalar modes map to standard types not in the table.  */
1542   if (q != qualifier_poly && !VECTOR_MODE_P (mode))
1543     return arm_simd_builtin_std_type (mode, q);
1544 
1545   for (i = 0; i < nelts; i++)
1546     if (arm_simd_types[i].mode == mode
1547 	&& arm_simd_types[i].q == q)
1548       return arm_simd_types[i].itype;
1549 
1550   /* Note that we won't have caught the underlying type for poly64x2_t
1551      in the above table.  This gets default mangling.  */
1552 
1553   return NULL_TREE;
1554 }
1555 
1556 static tree
1557 arm_simd_builtin_type (machine_mode mode, bool unsigned_p, bool poly_p)
1558 {
1559   if (poly_p)
1560     return arm_lookup_simd_builtin_type (mode, qualifier_poly);
1561   else if (unsigned_p)
1562     return arm_lookup_simd_builtin_type (mode, qualifier_unsigned);
1563   else
1564     return arm_lookup_simd_builtin_type (mode, qualifier_none);
1565 }
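
/* For instance (illustrative only), arm_simd_builtin_type (V8QImode, true,
   false) asks for the (V8QImode, qualifier_unsigned) entry of arm_simd_types
   and so returns the internal type behind uint8x8_t, while a scalar mode
   such as SImode is routed straight to arm_simd_builtin_std_type above.  */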
1566 
1567 static void
1568 arm_init_simd_builtin_types (void)
1569 {
1570   int i;
1571   int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
1572   tree tdecl;
1573 
1574   /* Poly types are a world of their own.  In order to maintain legacy
1575      ABI, they get initialized using the old interface and don't get
1576      an entry in our mangling table; consequently, they get default
1577      mangling.  As a further gotcha, poly8_t and poly16_t are signed
1578      types, while poly64_t and poly128_t are unsigned.  */
1579   if (!TARGET_HAVE_MVE)
1580     {
1581       arm_simd_polyQI_type_node
1582 	= build_distinct_type_copy (intQI_type_node);
1583       (*lang_hooks.types.register_builtin_type) (arm_simd_polyQI_type_node,
1584 						 "__builtin_neon_poly8");
1585       arm_simd_polyHI_type_node
1586 	= build_distinct_type_copy (intHI_type_node);
1587       (*lang_hooks.types.register_builtin_type) (arm_simd_polyHI_type_node,
1588 						 "__builtin_neon_poly16");
1589       arm_simd_polyDI_type_node
1590 	= build_distinct_type_copy (unsigned_intDI_type_node);
1591       (*lang_hooks.types.register_builtin_type) (arm_simd_polyDI_type_node,
1592 						 "__builtin_neon_poly64");
1593       arm_simd_polyTI_type_node
1594 	= build_distinct_type_copy (unsigned_intTI_type_node);
1595       (*lang_hooks.types.register_builtin_type) (arm_simd_polyTI_type_node,
1596 						 "__builtin_neon_poly128");
1597       /* Init poly vector element types with scalar poly types.  */
1598       arm_simd_types[Poly8x8_t].eltype = arm_simd_polyQI_type_node;
1599       arm_simd_types[Poly8x16_t].eltype = arm_simd_polyQI_type_node;
1600       arm_simd_types[Poly16x4_t].eltype = arm_simd_polyHI_type_node;
1601       arm_simd_types[Poly16x8_t].eltype = arm_simd_polyHI_type_node;
1602       /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
1603 	 mangling.  */
1604 
1605       /* Prevent front-ends from transforming poly vectors into string
1606 	 literals.  */
1607       TYPE_STRING_FLAG (arm_simd_polyQI_type_node) = false;
1608       TYPE_STRING_FLAG (arm_simd_polyHI_type_node) = false;
1609     }
1610   /* Init all the element types built by the front-end.  */
1611   arm_simd_types[Int8x8_t].eltype = intQI_type_node;
1612   arm_simd_types[Int8x16_t].eltype = intQI_type_node;
1613   arm_simd_types[Int16x4_t].eltype = intHI_type_node;
1614   arm_simd_types[Int16x8_t].eltype = intHI_type_node;
1615   arm_simd_types[Int32x2_t].eltype = intSI_type_node;
1616   arm_simd_types[Int32x4_t].eltype = intSI_type_node;
1617   arm_simd_types[Int64x2_t].eltype = intDI_type_node;
1618   arm_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
1619   arm_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
1620   arm_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
1621   arm_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
1622   arm_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
1623   arm_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
1624   arm_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
1625 
1626   /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
1627      mangling.  */
1628 
1629   /* Continue with standard types.  */
1630   /* The __builtin_simd{64,128}_float16 types are kept private unless
1631      we have a scalar __fp16 type.  */
1632   arm_simd_types[Float16x4_t].eltype = arm_fp16_type_node;
1633   arm_simd_types[Float16x8_t].eltype = arm_fp16_type_node;
1634   arm_simd_types[Float32x2_t].eltype = float_type_node;
1635   arm_simd_types[Float32x4_t].eltype = float_type_node;
1636 
1637   /* Init Bfloat vector types with underlying __bf16 scalar type.  */
1638   arm_simd_types[Bfloat16x2_t].eltype = arm_bf16_type_node;
1639   arm_simd_types[Bfloat16x4_t].eltype = arm_bf16_type_node;
1640   arm_simd_types[Bfloat16x8_t].eltype = arm_bf16_type_node;
1641 
1642   for (i = 0; i < nelts; i++)
1643     {
1644       tree eltype = arm_simd_types[i].eltype;
1645       machine_mode mode = arm_simd_types[i].mode;
1646 
1647       if (eltype == NULL)
1648 	continue;
1649       if (arm_simd_types[i].itype == NULL)
1650 	{
1651 	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
1652 	  type = build_distinct_type_copy (type);
1653 	  SET_TYPE_STRUCTURAL_EQUALITY (type);
1654 
1655 	  TYPE_ATTRIBUTES (type)
1656 	    = tree_cons (get_identifier ("Advanced SIMD type"),
1657 			 NULL_TREE, TYPE_ATTRIBUTES (type));
1658 	  arm_simd_types[i].itype = type;
1659 	}
1660 
1661       tdecl = add_builtin_type (arm_simd_types[i].name,
1662 				arm_simd_types[i].itype);
1663       TYPE_NAME (arm_simd_types[i].itype) = tdecl;
1664       SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types[i].itype);
1665     }
1666 
1667 #define AARCH_BUILD_SIGNED_TYPE(mode)  \
1668   make_signed_type (GET_MODE_PRECISION (mode));
1669   arm_simd_intOI_type_node = AARCH_BUILD_SIGNED_TYPE (OImode);
1670   arm_simd_intEI_type_node = AARCH_BUILD_SIGNED_TYPE (EImode);
1671   arm_simd_intCI_type_node = AARCH_BUILD_SIGNED_TYPE (CImode);
1672   arm_simd_intXI_type_node = AARCH_BUILD_SIGNED_TYPE (XImode);
1673 #undef AARCH_BUILD_SIGNED_TYPE
1674 
1675   tdecl = add_builtin_type
1676 	    ("__builtin_neon_ei" , arm_simd_intEI_type_node);
1677   TYPE_NAME (arm_simd_intEI_type_node) = tdecl;
1678   tdecl = add_builtin_type
1679 	    ("__builtin_neon_oi" , arm_simd_intOI_type_node);
1680   TYPE_NAME (arm_simd_intOI_type_node) = tdecl;
1681   tdecl = add_builtin_type
1682 	    ("__builtin_neon_ci" , arm_simd_intCI_type_node);
1683   TYPE_NAME (arm_simd_intCI_type_node) = tdecl;
1684   tdecl = add_builtin_type
1685 	    ("__builtin_neon_xi" , arm_simd_intXI_type_node);
1686   TYPE_NAME (arm_simd_intXI_type_node) = tdecl;
1687 }
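
/* After the code above runs, each table entry whose element type was filled
   in has a distinct vector type registered under its user-visible name
   (int8x8_t, float32x4_t, ...), tagged with the "Advanced SIMD type"
   attribute, and the oversized OI/EI/CI/XI integer modes are exposed as
   __builtin_neon_oi and friends for use by the multi-register structure
   load/store builtins.  */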
1688 
1689 static void
1690 arm_init_simd_builtin_scalar_types (void)
1691 {
1692   /* Define typedefs for all the standard scalar types.  */
1693   (*lang_hooks.types.register_builtin_type) (intQI_type_node,
1694 					     "__builtin_neon_qi");
1695   (*lang_hooks.types.register_builtin_type) (intHI_type_node,
1696 					     "__builtin_neon_hi");
1697   (*lang_hooks.types.register_builtin_type) (intSI_type_node,
1698 					     "__builtin_neon_si");
1699   (*lang_hooks.types.register_builtin_type) (float_type_node,
1700 					     "__builtin_neon_sf");
1701   (*lang_hooks.types.register_builtin_type) (intDI_type_node,
1702 					     "__builtin_neon_di");
1703   (*lang_hooks.types.register_builtin_type) (double_type_node,
1704 					     "__builtin_neon_df");
1705   (*lang_hooks.types.register_builtin_type) (intTI_type_node,
1706 					     "__builtin_neon_ti");
1707   (*lang_hooks.types.register_builtin_type) (arm_bf16_type_node,
1708                                              "__builtin_neon_bf");
1709   /* Unsigned integer types for various mode sizes.  */
1710   (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
1711 					     "__builtin_neon_uqi");
1712   (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
1713 					     "__builtin_neon_uhi");
1714   (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
1715 					     "__builtin_neon_usi");
1716   (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
1717 					     "__builtin_neon_udi");
1718   (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
1719 					     "__builtin_neon_uti");
1720 }
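
/* Both the MVE and the NEON initialisation paths below call this, so the
   __builtin_neon_* scalar names are available whichever set of vector
   intrinsics is being set up.  */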
1721 
1722 /* Set up a builtin.  It will use information stored in the argument struct D to
1723    derive the builtin's type signature and name.  It will append the name in D
1724    to the PREFIX passed and use these to create a builtin declaration that is
1725    then stored in 'arm_builtin_decls' under index FCODE.  This FCODE is also
1726    written back to D for future use.  */
1727 
1728 static void
1729 arm_init_builtin (unsigned int fcode, arm_builtin_datum *d,
1730 		  const char * prefix)
1731 {
1732   bool print_type_signature_p = false;
1733   char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
1734   char namebuf[60];
1735   tree ftype = NULL;
1736   tree fndecl = NULL;
1737 
1738   d->fcode = fcode;
1739 
1740   /* We must track two variables here.  op_num is
1741      the operand number as in the RTL pattern.  This is
1742      required to access the mode (e.g. V4SF mode) of the
1743      argument, from which the base type can be derived.
1744      arg_num is an index into the qualifiers data, which
1745      gives qualifiers to the type (e.g. const unsigned).
1746      The reason these two variables may differ by one is the
1747      void return type.  While all return types take the 0th entry
1748      in the qualifiers array, a void return has no corresponding
1749      operand in the RTL pattern.  */
1750   int op_num = insn_data[d->code].n_operands - 1;
1751   int arg_num = d->qualifiers[0] & qualifier_void
1752     ? op_num + 1
1753     : op_num;
1754   tree return_type = void_type_node, args = void_list_node;
1755   tree eltype;
1756 
1757   /* Build a function type directly from the insn_data for this
1758      builtin.  The build_function_type () function takes care of
1759      removing duplicates for us.  */
1760   for (; op_num >= 0; arg_num--, op_num--)
1761     {
1762       machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
1763       enum arm_type_qualifiers qualifiers = d->qualifiers[arg_num];
1764 
1765       if (qualifiers & qualifier_unsigned)
1766 	{
1767 	  type_signature[arg_num] = 'u';
1768 	  print_type_signature_p = true;
1769 	}
1770       else if (qualifiers & qualifier_poly)
1771 	{
1772 	  type_signature[arg_num] = 'p';
1773 	  print_type_signature_p = true;
1774 	}
1775       else
1776 	type_signature[arg_num] = 's';
1777 
1778       /* Skip an internal operand for vget_{low, high}.  */
1779       if (qualifiers & qualifier_internal)
1780 	continue;
1781 
1782       /* Some builtins have different user-facing types
1783 	 for certain arguments, encoded in d->mode.  */
1784       if (qualifiers & qualifier_map_mode)
1785 	op_mode = d->mode;
1786 
1787       /* For pointers, we want a pointer to the basic type
1788 	 of the vector.  */
1789       if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
1790 	op_mode = GET_MODE_INNER (op_mode);
1791 
1792       /* For void pointers we already have nodes constructed by the midend.  */
1793       if (qualifiers & qualifier_void_pointer)
1794 	eltype = qualifiers & qualifier_const
1795 		 ? const_ptr_type_node : ptr_type_node;
1796       else
1797 	{
1798 	  eltype
1799 	    = arm_simd_builtin_type (op_mode,
1800 				     (qualifiers & qualifier_unsigned) != 0,
1801 				     (qualifiers & qualifier_poly) != 0);
1802 	  gcc_assert (eltype != NULL);
1803 
1804 	  /* Add qualifiers.  */
1805 	  if (qualifiers & qualifier_const)
1806 	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
1807 
1808 	  if (qualifiers & qualifier_pointer)
1809 	    eltype = build_pointer_type (eltype);
1810 	}
1811       /* If we have reached arg_num == 0, we are at a non-void
1812 	 return type.  Otherwise, we are still processing
1813 	 arguments.  */
1814       if (arg_num == 0)
1815 	return_type = eltype;
1816       else
1817 	args = tree_cons (NULL_TREE, eltype, args);
1818     }
1819 
1820   ftype = build_function_type (return_type, args);
1821 
1822   gcc_assert (ftype != NULL);
1823 
1824   if (print_type_signature_p
1825       && IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
1826     snprintf (namebuf, sizeof (namebuf), "%s_%s_%s",
1827 	      prefix, d->name, type_signature);
1828   else
1829     snprintf (namebuf, sizeof (namebuf), "%s_%s",
1830 	      prefix, d->name);
1831 
1832   fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
1833 				 NULL, NULL_TREE);
1834   arm_builtin_decls[fcode] = fndecl;
1835 }
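
/* A hedged example of the naming scheme above (the entry name "foo" is a
   placeholder, not a real builtin): with prefix "__builtin_arm" the declared
   name is __builtin_arm_foo; a builtin in the VFP/NEON range whose
   qualifiers include unsigned or polynomial operands additionally gets the
   per-position signature suffix, e.g. __builtin_neon_foo_uss for an unsigned
   result with two signed arguments.  */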
1836 
1837 /* Initialize the backend REAL_TYPE node underlying the bfloat16 types.  */
1838 static void
1839 arm_init_bf16_types (void)
1840 {
1841   arm_bf16_type_node = make_node (REAL_TYPE);
1842   TYPE_PRECISION (arm_bf16_type_node) = 16;
1843   SET_TYPE_MODE (arm_bf16_type_node, BFmode);
1844   layout_type (arm_bf16_type_node);
1845 
1846   lang_hooks.types.register_builtin_type (arm_bf16_type_node, "__bf16");
1847   arm_bf16_ptr_type_node = build_pointer_type (arm_bf16_type_node);
1848 }
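
/* Once this has run, user code can spell the scalar type directly, e.g.
   (illustrative):

     __bf16 x;        // 16-bit REAL_TYPE in BFmode
     __bf16 *p = &x;  // pointer type recorded in arm_bf16_ptr_type_node
*/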
1849 
1850 /* Set up the ACLE builtins, including builtins for instructions that are
1851    not in the current target ISA, so that the user can compile particular
1852    modules with target-specific options that differ from the command-line
1853    options.  Such builtins will be rejected in arm_expand_builtin.  */
1854 
1855 static void
1856 arm_init_acle_builtins (void)
1857 {
1858   unsigned int i, fcode = ARM_BUILTIN_ACLE_PATTERN_START;
1859 
1860   tree sat_check_fpr = build_function_type_list (void_type_node,
1861 						 intSI_type_node,
1862 						 intSI_type_node,
1863 						 intSI_type_node,
1864 						 NULL);
1865   arm_builtin_decls[ARM_BUILTIN_SAT_IMM_CHECK]
1866     = add_builtin_function ("__builtin_sat_imm_check", sat_check_fpr,
1867 			    ARM_BUILTIN_SAT_IMM_CHECK, BUILT_IN_MD,
1868 			    NULL, NULL_TREE);
1869 
1870   for (i = 0; i < ARRAY_SIZE (acle_builtin_data); i++, fcode++)
1871     {
1872       arm_builtin_datum *d = &acle_builtin_data[i];
1873       arm_init_builtin (fcode, d, "__builtin_arm");
1874     }
1875 }
1876 
1877 static void
1878 arm_init_cde_builtins (void)
1879 {
1880   unsigned int i, fcode = ARM_BUILTIN_CDE_PATTERN_START;
1881   for (i = 0; i < ARRAY_SIZE (cde_builtin_data); i++, fcode++)
1882     {
1883       /* Only define CDE floating point builtins if the target has floating
1884 	 point registers.  NOTE: without HARD_FLOAT we don't have MVE, so we
1885 	 can break out of this loop directly here.  */
1886       if (!TARGET_MAYBE_HARD_FLOAT && fcode >= ARM_BUILTIN_vcx1si)
1887 	break;
1888       /* Only define CDE/MVE builtins if MVE is available.  */
1889       if (!TARGET_HAVE_MVE && fcode >= ARM_BUILTIN_vcx1qv16qi)
1890 	break;
1891       arm_builtin_cde_datum *cde = &cde_builtin_data[i];
1892       arm_builtin_datum *d = &cde->base;
1893       arm_init_builtin (fcode, d, "__builtin_arm");
1894       set_call_expr_flags (arm_builtin_decls[fcode], cde->ecf_flag);
1895     }
1896 }
1897 
1898 /* Set up all the MVE builtins mentioned in the arm_mve_builtins.def file.  */
1899 static void
1900 arm_init_mve_builtins (void)
1901 {
1902   volatile unsigned int i, fcode = ARM_BUILTIN_MVE_PATTERN_START;
1903 
1904   arm_init_simd_builtin_scalar_types ();
1905   arm_init_simd_builtin_types ();
1906 
1907   /* Add support for __builtin_{get,set}_fpscr_nzcvqc, used by MVE intrinsics
1908      that read and/or write the carry bit.  */
1909   tree get_fpscr_nzcvqc = build_function_type_list (intSI_type_node,
1910 						    NULL);
1911   tree set_fpscr_nzcvqc = build_function_type_list (void_type_node,
1912 						    intSI_type_node,
1913 						    NULL);
1914   arm_builtin_decls[ARM_BUILTIN_GET_FPSCR_NZCVQC]
1915     = add_builtin_function ("__builtin_arm_get_fpscr_nzcvqc", get_fpscr_nzcvqc,
1916 			    ARM_BUILTIN_GET_FPSCR_NZCVQC, BUILT_IN_MD, NULL,
1917 			    NULL_TREE);
1918   arm_builtin_decls[ARM_BUILTIN_SET_FPSCR_NZCVQC]
1919     = add_builtin_function ("__builtin_arm_set_fpscr_nzcvqc", set_fpscr_nzcvqc,
1920 			    ARM_BUILTIN_SET_FPSCR_NZCVQC, BUILT_IN_MD, NULL,
1921 			    NULL_TREE);
1922 
1923   for (i = 0; i < ARRAY_SIZE (mve_builtin_data); i++, fcode++)
1924     {
1925       arm_builtin_datum *d = &mve_builtin_data[i];
1926       arm_init_builtin (fcode, d, "__builtin_mve");
1927     }
1928 }
1929 
1930 /* Set up all the NEON builtins, including builtins for instructions that
1931    are not in the current target ISA, so that the user can compile particular
1932    modules with target-specific options that differ from the command-line
1933    options.  Such builtins will be rejected in arm_expand_builtin.  */
1934 
1935 static void
1936 arm_init_neon_builtins (void)
1937 {
1938   unsigned int i, fcode = ARM_BUILTIN_NEON_PATTERN_START;
1939 
1940   arm_init_simd_builtin_types ();
1941 
1942   /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1943      Therefore we need to preserve the old __builtin scalar types.  This can be
1944      removed once all the intrinsics become strongly typed using the qualifier
1945      system.  */
1946   arm_init_simd_builtin_scalar_types ();
1947 
1948   for (i = 0; i < ARRAY_SIZE (neon_builtin_data); i++, fcode++)
1949     {
1950       arm_builtin_datum *d = &neon_builtin_data[i];
1951       arm_init_builtin (fcode, d, "__builtin_neon");
1952     }
1953 }
1954 
1955 /* Set up all the scalar floating point builtins.  */
1956 
1957 static void
1958 arm_init_vfp_builtins (void)
1959 {
1960   unsigned int i, fcode = ARM_BUILTIN_VFP_PATTERN_START;
1961 
1962   for (i = 0; i < ARRAY_SIZE (vfp_builtin_data); i++, fcode++)
1963     {
1964       arm_builtin_datum *d = &vfp_builtin_data[i];
1965       arm_init_builtin (fcode, d, "__builtin_neon");
1966     }
1967 }
1968 
1969 static void
1970 arm_init_crypto_builtins (void)
1971 {
1972   tree V16UQI_type_node
1973     = arm_simd_builtin_type (V16QImode, true, false);
1974 
1975   tree V4USI_type_node
1976     = arm_simd_builtin_type (V4SImode, true, false);
1977 
1978   tree v16uqi_ftype_v16uqi
1979     = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1980 				NULL_TREE);
1981 
1982   tree v16uqi_ftype_v16uqi_v16uqi
1983 	= build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1984 				    V16UQI_type_node, NULL_TREE);
1985 
1986   tree v4usi_ftype_v4usi
1987     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1988 				NULL_TREE);
1989 
1990   tree v4usi_ftype_v4usi_v4usi
1991     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1992 				V4USI_type_node, NULL_TREE);
1993 
1994   tree v4usi_ftype_v4usi_v4usi_v4usi
1995     = build_function_type_list (V4USI_type_node, V4USI_type_node,
1996 				V4USI_type_node, V4USI_type_node,
1997 				NULL_TREE);
1998 
1999   tree uti_ftype_udi_udi
2000     = build_function_type_list (unsigned_intTI_type_node,
2001 				unsigned_intDI_type_node,
2002 				unsigned_intDI_type_node,
2003 				NULL_TREE);
2004 
2005   #undef CRYPTO1
2006   #undef CRYPTO2
2007   #undef CRYPTO3
2008   #undef C
2009   #undef N
2010   #undef CF
2011   #undef FT1
2012   #undef FT2
2013   #undef FT3
2014 
2015   #define C(U) \
2016     ARM_BUILTIN_CRYPTO_##U
2017   #define N(L) \
2018     "__builtin_arm_crypto_"#L
2019   #define FT1(R, A) \
2020     R##_ftype_##A
2021   #define FT2(R, A1, A2) \
2022     R##_ftype_##A1##_##A2
2023   #define FT3(R, A1, A2, A3) \
2024     R##_ftype_##A1##_##A2##_##A3
2025   #define CRYPTO1(L, U, R, A) \
2026     arm_builtin_decls[C (U)] \
2027       = add_builtin_function (N (L), FT1 (R, A), \
2028 		  C (U), BUILT_IN_MD, NULL, NULL_TREE);
2029   #define CRYPTO2(L, U, R, A1, A2)  \
2030     arm_builtin_decls[C (U)]	\
2031       = add_builtin_function (N (L), FT2 (R, A1, A2), \
2032 		  C (U), BUILT_IN_MD, NULL, NULL_TREE);
2033 
2034   #define CRYPTO3(L, U, R, A1, A2, A3) \
2035     arm_builtin_decls[C (U)]	   \
2036       = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
2037 				  C (U), BUILT_IN_MD, NULL, NULL_TREE);
2038   #include "crypto.def"
2039 
2040   #undef CRYPTO1
2041   #undef CRYPTO2
2042   #undef CRYPTO3
2043   #undef C
2044   #undef N
2045   #undef FT1
2046   #undef FT2
2047   #undef FT3
2048 }
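
/* As a sketch of the X-macro machinery above, a hypothetical two-operand
   entry CRYPTO2 (aesd, AESD, v16uqi, v16uqi, v16uqi) in crypto.def would
   expand to roughly:

     arm_builtin_decls[ARM_BUILTIN_CRYPTO_AESD]
       = add_builtin_function ("__builtin_arm_crypto_aesd",
                               v16uqi_ftype_v16uqi_v16uqi,
                               ARM_BUILTIN_CRYPTO_AESD, BUILT_IN_MD,
                               NULL, NULL_TREE);

   crypto.def is the authoritative list of entries.  */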
2049 
2050 #undef NUM_DREG_TYPES
2051 #undef NUM_QREG_TYPES
2052 
2053 #define def_mbuiltin(FLAG, NAME, TYPE, CODE)				\
2054   do									\
2055     {									\
2056       if (FLAG == isa_nobit						\
2057 	  || bitmap_bit_p (arm_active_target.isa, FLAG))		\
2058 	{								\
2059 	  tree bdecl;							\
2060 	  bdecl = add_builtin_function ((NAME), (TYPE), (CODE),		\
2061 					BUILT_IN_MD, NULL, NULL_TREE);	\
2062 	  arm_builtin_decls[CODE] = bdecl;				\
2063 	}								\
2064     }									\
2065   while (0)
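
/* For example, a later call such as
     def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_wzero", di_ftype_void,
                   ARM_BUILTIN_WZERO);
   registers the builtin only when iWMMXt is in the active ISA (or when the
   feature is isa_nobit) and records the resulting decl in arm_builtin_decls;
   the real calls below go through the iwmmx_mbuiltin wrappers.  */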
2066 
2067 struct builtin_description
2068 {
2069   const enum isa_feature   feature;
2070   const enum insn_code     icode;
2071   const char * const       name;
2072   const enum arm_builtins  code;
2073   const enum rtx_code      comparison;
2074   const unsigned int       flag;
2075 };
2076 
2077 static const struct builtin_description bdesc_2arg[] =
2078 {
2079 #define IWMMXT_BUILTIN(code, string, builtin) \
2080   { isa_bit_iwmmxt, CODE_FOR_##code, \
2081     "__builtin_arm_" string,			     \
2082     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
2083 
2084 #define IWMMXT2_BUILTIN(code, string, builtin) \
2085   { isa_bit_iwmmxt2, CODE_FOR_##code, \
2086     "__builtin_arm_" string,			      \
2087     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
2088 
2089   IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
2090   IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
2091   IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
2092   IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
2093   IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
2094   IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
2095   IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
2096   IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
2097   IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
2098   IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
2099   IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
2100   IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
2101   IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
2102   IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
2103   IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
2104   IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
2105   IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
2106   IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
2107   IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
2108   IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
2109   IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
2110   IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
2111   IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
2112   IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
2113   IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
2114   IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
2115   IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
2116   IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
2117   IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
2118   IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
2119   IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
2120   IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
2121   IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
2122   IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
2123   IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
2124   IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
2125   IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
2126   IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
2127   IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
2128   IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
2129   IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
2130   IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
2131   IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
2132   IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
2133   IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
2134   IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
2135   IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
2136   IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
2137   IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
2138   IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
2139   IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
2140   IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
2141   IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
2142   IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
2143   IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
2144   IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
2145   IWMMXT2_BUILTIN (iwmmxt_waddsubhx, "waddsubhx", WADDSUBHX)
2146   IWMMXT2_BUILTIN (iwmmxt_wsubaddhx, "wsubaddhx", WSUBADDHX)
2147   IWMMXT2_BUILTIN (iwmmxt_wabsdiffb, "wabsdiffb", WABSDIFFB)
2148   IWMMXT2_BUILTIN (iwmmxt_wabsdiffh, "wabsdiffh", WABSDIFFH)
2149   IWMMXT2_BUILTIN (iwmmxt_wabsdiffw, "wabsdiffw", WABSDIFFW)
2150   IWMMXT2_BUILTIN (iwmmxt_avg4, "wavg4", WAVG4)
2151   IWMMXT2_BUILTIN (iwmmxt_avg4r, "wavg4r", WAVG4R)
2152   IWMMXT2_BUILTIN (iwmmxt_wmulwsm, "wmulwsm", WMULWSM)
2153   IWMMXT2_BUILTIN (iwmmxt_wmulwum, "wmulwum", WMULWUM)
2154   IWMMXT2_BUILTIN (iwmmxt_wmulwsmr, "wmulwsmr", WMULWSMR)
2155   IWMMXT2_BUILTIN (iwmmxt_wmulwumr, "wmulwumr", WMULWUMR)
2156   IWMMXT2_BUILTIN (iwmmxt_wmulwl, "wmulwl", WMULWL)
2157   IWMMXT2_BUILTIN (iwmmxt_wmulsmr, "wmulsmr", WMULSMR)
2158   IWMMXT2_BUILTIN (iwmmxt_wmulumr, "wmulumr", WMULUMR)
2159   IWMMXT2_BUILTIN (iwmmxt_wqmulm, "wqmulm", WQMULM)
2160   IWMMXT2_BUILTIN (iwmmxt_wqmulmr, "wqmulmr", WQMULMR)
2161   IWMMXT2_BUILTIN (iwmmxt_wqmulwm, "wqmulwm", WQMULWM)
2162   IWMMXT2_BUILTIN (iwmmxt_wqmulwmr, "wqmulwmr", WQMULWMR)
2163   IWMMXT_BUILTIN (iwmmxt_walignr0, "walignr0", WALIGNR0)
2164   IWMMXT_BUILTIN (iwmmxt_walignr1, "walignr1", WALIGNR1)
2165   IWMMXT_BUILTIN (iwmmxt_walignr2, "walignr2", WALIGNR2)
2166   IWMMXT_BUILTIN (iwmmxt_walignr3, "walignr3", WALIGNR3)
2167 
2168 #define IWMMXT_BUILTIN2(code, builtin) \
2169   { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
2170     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
2171 
2172 #define IWMMXT2_BUILTIN2(code, builtin) \
2173   { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
2174     ARM_BUILTIN_##builtin, UNKNOWN, 0 },
2175 
2176   IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm, WADDBHUSM)
2177   IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl, WADDBHUSL)
2178   IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
2179   IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
2180   IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
2181   IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
2182   IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
2183   IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
2184   IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
2185   IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
2186 
2187 
2188 #define FP_BUILTIN(L, U) \
2189   {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
2190    UNKNOWN, 0},
2191 
2192   FP_BUILTIN (get_fpscr, GET_FPSCR)
2193   FP_BUILTIN (set_fpscr, SET_FPSCR)
2194 #undef FP_BUILTIN
2195 
2196 #define CRYPTO_BUILTIN(L, U)					   \
2197   {isa_nobit, CODE_FOR_crypto_##L,	"__builtin_arm_crypto_"#L, \
2198    ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
2199 #undef CRYPTO1
2200 #undef CRYPTO2
2201 #undef CRYPTO3
2202 #define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
2203 #define CRYPTO1(L, U, R, A)
2204 #define CRYPTO3(L, U, R, A1, A2, A3)
2205 #include "crypto.def"
2206 #undef CRYPTO1
2207 #undef CRYPTO2
2208 #undef CRYPTO3
2209 
2210 };
2211 
2212 static const struct builtin_description bdesc_1arg[] =
2213 {
2214   IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
2215   IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
2216   IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
2217   IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
2218   IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
2219   IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
2220   IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
2221   IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
2222   IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
2223   IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
2224   IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
2225   IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
2226   IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
2227   IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
2228   IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
2229   IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
2230   IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
2231   IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
2232   IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3, "wabsb", WABSB)
2233   IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3, "wabsh", WABSH)
2234   IWMMXT2_BUILTIN (iwmmxt_wabsv2si3, "wabsw", WABSW)
2235   IWMMXT_BUILTIN (tbcstv8qi, "tbcstb", TBCSTB)
2236   IWMMXT_BUILTIN (tbcstv4hi, "tbcsth", TBCSTH)
2237   IWMMXT_BUILTIN (tbcstv2si, "tbcstw", TBCSTW)
2238 
2239 #define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
2240 #define CRYPTO2(L, U, R, A1, A2)
2241 #define CRYPTO3(L, U, R, A1, A2, A3)
2242 #include "crypto.def"
2243 #undef CRYPTO1
2244 #undef CRYPTO2
2245 #undef CRYPTO3
2246 };
2247 
2248 static const struct builtin_description bdesc_3arg[] =
2249 {
2250 #define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
2251 #define CRYPTO1(L, U, R, A)
2252 #define CRYPTO2(L, U, R, A1, A2)
2253 #include "crypto.def"
2254 #undef CRYPTO1
2255 #undef CRYPTO2
2256 #undef CRYPTO3
2257  };
2258 #undef CRYPTO_BUILTIN
2259 
2260 /* Set up all the iWMMXt builtins.  This is not called if
2261    TARGET_IWMMXT is zero.  */
2262 
2263 static void
2264 arm_init_iwmmxt_builtins (void)
2265 {
2266   const struct builtin_description * d;
2267   size_t i;
2268 
2269   tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
2270   tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
2271   tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);
2272 
2273   tree v8qi_ftype_v8qi_v8qi_int
2274     = build_function_type_list (V8QI_type_node,
2275 				V8QI_type_node, V8QI_type_node,
2276 				integer_type_node, NULL_TREE);
2277   tree v4hi_ftype_v4hi_int
2278     = build_function_type_list (V4HI_type_node,
2279 				V4HI_type_node, integer_type_node, NULL_TREE);
2280   tree v2si_ftype_v2si_int
2281     = build_function_type_list (V2SI_type_node,
2282 				V2SI_type_node, integer_type_node, NULL_TREE);
2283   tree v2si_ftype_di_di
2284     = build_function_type_list (V2SI_type_node,
2285 				long_long_integer_type_node,
2286 				long_long_integer_type_node,
2287 				NULL_TREE);
2288   tree di_ftype_di_int
2289     = build_function_type_list (long_long_integer_type_node,
2290 				long_long_integer_type_node,
2291 				integer_type_node, NULL_TREE);
2292   tree di_ftype_di_int_int
2293     = build_function_type_list (long_long_integer_type_node,
2294 				long_long_integer_type_node,
2295 				integer_type_node,
2296 				integer_type_node, NULL_TREE);
2297   tree int_ftype_v8qi
2298     = build_function_type_list (integer_type_node,
2299 				V8QI_type_node, NULL_TREE);
2300   tree int_ftype_v4hi
2301     = build_function_type_list (integer_type_node,
2302 				V4HI_type_node, NULL_TREE);
2303   tree int_ftype_v2si
2304     = build_function_type_list (integer_type_node,
2305 				V2SI_type_node, NULL_TREE);
2306   tree int_ftype_v8qi_int
2307     = build_function_type_list (integer_type_node,
2308 				V8QI_type_node, integer_type_node, NULL_TREE);
2309   tree int_ftype_v4hi_int
2310     = build_function_type_list (integer_type_node,
2311 				V4HI_type_node, integer_type_node, NULL_TREE);
2312   tree int_ftype_v2si_int
2313     = build_function_type_list (integer_type_node,
2314 				V2SI_type_node, integer_type_node, NULL_TREE);
2315   tree v8qi_ftype_v8qi_int_int
2316     = build_function_type_list (V8QI_type_node,
2317 				V8QI_type_node, integer_type_node,
2318 				integer_type_node, NULL_TREE);
2319   tree v4hi_ftype_v4hi_int_int
2320     = build_function_type_list (V4HI_type_node,
2321 				V4HI_type_node, integer_type_node,
2322 				integer_type_node, NULL_TREE);
2323   tree v2si_ftype_v2si_int_int
2324     = build_function_type_list (V2SI_type_node,
2325 				V2SI_type_node, integer_type_node,
2326 				integer_type_node, NULL_TREE);
2327   /* Miscellaneous.  */
2328   tree v8qi_ftype_v4hi_v4hi
2329     = build_function_type_list (V8QI_type_node,
2330 				V4HI_type_node, V4HI_type_node, NULL_TREE);
2331   tree v4hi_ftype_v2si_v2si
2332     = build_function_type_list (V4HI_type_node,
2333 				V2SI_type_node, V2SI_type_node, NULL_TREE);
2334   tree v8qi_ftype_v4hi_v8qi
2335     = build_function_type_list (V8QI_type_node,
2336 	                        V4HI_type_node, V8QI_type_node, NULL_TREE);
2337   tree v2si_ftype_v4hi_v4hi
2338     = build_function_type_list (V2SI_type_node,
2339 				V4HI_type_node, V4HI_type_node, NULL_TREE);
2340   tree v2si_ftype_v8qi_v8qi
2341     = build_function_type_list (V2SI_type_node,
2342 				V8QI_type_node, V8QI_type_node, NULL_TREE);
2343   tree v4hi_ftype_v4hi_di
2344     = build_function_type_list (V4HI_type_node,
2345 				V4HI_type_node, long_long_integer_type_node,
2346 				NULL_TREE);
2347   tree v2si_ftype_v2si_di
2348     = build_function_type_list (V2SI_type_node,
2349 				V2SI_type_node, long_long_integer_type_node,
2350 				NULL_TREE);
2351   tree di_ftype_void
2352     = build_function_type_list (long_long_unsigned_type_node, NULL_TREE);
2353   tree int_ftype_void
2354     = build_function_type_list (integer_type_node, NULL_TREE);
2355   tree di_ftype_v8qi
2356     = build_function_type_list (long_long_integer_type_node,
2357 				V8QI_type_node, NULL_TREE);
2358   tree di_ftype_v4hi
2359     = build_function_type_list (long_long_integer_type_node,
2360 				V4HI_type_node, NULL_TREE);
2361   tree di_ftype_v2si
2362     = build_function_type_list (long_long_integer_type_node,
2363 				V2SI_type_node, NULL_TREE);
2364   tree v2si_ftype_v4hi
2365     = build_function_type_list (V2SI_type_node,
2366 				V4HI_type_node, NULL_TREE);
2367   tree v4hi_ftype_v8qi
2368     = build_function_type_list (V4HI_type_node,
2369 				V8QI_type_node, NULL_TREE);
2370   tree v8qi_ftype_v8qi
2371     = build_function_type_list (V8QI_type_node,
2372 	                        V8QI_type_node, NULL_TREE);
2373   tree v4hi_ftype_v4hi
2374     = build_function_type_list (V4HI_type_node,
2375 	                        V4HI_type_node, NULL_TREE);
2376   tree v2si_ftype_v2si
2377     = build_function_type_list (V2SI_type_node,
2378 	                        V2SI_type_node, NULL_TREE);
2379 
2380   tree di_ftype_di_v4hi_v4hi
2381     = build_function_type_list (long_long_unsigned_type_node,
2382 				long_long_unsigned_type_node,
2383 				V4HI_type_node, V4HI_type_node,
2384 				NULL_TREE);
2385 
2386   tree di_ftype_v4hi_v4hi
2387     = build_function_type_list (long_long_unsigned_type_node,
2388 				V4HI_type_node,V4HI_type_node,
2389 				NULL_TREE);
2390 
2391   tree v2si_ftype_v2si_v4hi_v4hi
2392     = build_function_type_list (V2SI_type_node,
2393                                 V2SI_type_node, V4HI_type_node,
2394                                 V4HI_type_node, NULL_TREE);
2395 
2396   tree v2si_ftype_v2si_v8qi_v8qi
2397     = build_function_type_list (V2SI_type_node,
2398                                 V2SI_type_node, V8QI_type_node,
2399                                 V8QI_type_node, NULL_TREE);
2400 
2401   tree di_ftype_di_v2si_v2si
2402      = build_function_type_list (long_long_unsigned_type_node,
2403                                  long_long_unsigned_type_node,
2404                                  V2SI_type_node, V2SI_type_node,
2405                                  NULL_TREE);
2406 
2407    tree di_ftype_di_di_int
2408      = build_function_type_list (long_long_unsigned_type_node,
2409                                  long_long_unsigned_type_node,
2410                                  long_long_unsigned_type_node,
2411                                  integer_type_node, NULL_TREE);
2412 
2413    tree void_ftype_int
2414      = build_function_type_list (void_type_node,
2415                                  integer_type_node, NULL_TREE);
2416 
2417    tree v8qi_ftype_char
2418      = build_function_type_list (V8QI_type_node,
2419                                  signed_char_type_node, NULL_TREE);
2420 
2421    tree v4hi_ftype_short
2422      = build_function_type_list (V4HI_type_node,
2423                                  short_integer_type_node, NULL_TREE);
2424 
2425    tree v2si_ftype_int
2426      = build_function_type_list (V2SI_type_node,
2427                                  integer_type_node, NULL_TREE);
2428 
2429   /* Normal vector binops.  */
2430   tree v8qi_ftype_v8qi_v8qi
2431     = build_function_type_list (V8QI_type_node,
2432 				V8QI_type_node, V8QI_type_node, NULL_TREE);
2433   tree v4hi_ftype_v4hi_v4hi
2434     = build_function_type_list (V4HI_type_node,
2435 				V4HI_type_node,V4HI_type_node, NULL_TREE);
2436   tree v2si_ftype_v2si_v2si
2437     = build_function_type_list (V2SI_type_node,
2438 				V2SI_type_node, V2SI_type_node, NULL_TREE);
2439   tree di_ftype_di_di
2440     = build_function_type_list (long_long_unsigned_type_node,
2441 				long_long_unsigned_type_node,
2442 				long_long_unsigned_type_node,
2443 				NULL_TREE);
2444 
2445   /* Add all builtins that are more or less simple operations on two
2446      operands.  */
2447   for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
2448     {
2449       /* Use one of the operands; the target can have a different mode for
2450 	 mask-generating compares.  */
2451       machine_mode mode;
2452       tree type;
2453 
2454       if (d->name == 0
2455 	  || !(d->feature == isa_bit_iwmmxt
2456 	       || d->feature == isa_bit_iwmmxt2))
2457 	continue;
2458 
2459       mode = insn_data[d->icode].operand[1].mode;
2460 
2461       switch (mode)
2462 	{
2463 	case E_V8QImode:
2464 	  type = v8qi_ftype_v8qi_v8qi;
2465 	  break;
2466 	case E_V4HImode:
2467 	  type = v4hi_ftype_v4hi_v4hi;
2468 	  break;
2469 	case E_V2SImode:
2470 	  type = v2si_ftype_v2si_v2si;
2471 	  break;
2472 	case E_DImode:
2473 	  type = di_ftype_di_di;
2474 	  break;
2475 
2476 	default:
2477 	  gcc_unreachable ();
2478 	}
2479 
2480       def_mbuiltin (d->feature, d->name, type, d->code);
2481     }
2482 
2483   /* Add the remaining MMX insns with somewhat more complicated types.  */
2484 #define iwmmx_mbuiltin(NAME, TYPE, CODE)			\
2485   def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME, \
2486 		(TYPE), ARM_BUILTIN_ ## CODE)
2487 
2488 #define iwmmx2_mbuiltin(NAME, TYPE, CODE)                      \
2489   def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME, \
2490 		(TYPE),	ARM_BUILTIN_ ## CODE)
2491 
2492   iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);
2493   iwmmx_mbuiltin ("setwcgr0", void_ftype_int, SETWCGR0);
2494   iwmmx_mbuiltin ("setwcgr1", void_ftype_int, SETWCGR1);
2495   iwmmx_mbuiltin ("setwcgr2", void_ftype_int, SETWCGR2);
2496   iwmmx_mbuiltin ("setwcgr3", void_ftype_int, SETWCGR3);
2497   iwmmx_mbuiltin ("getwcgr0", int_ftype_void, GETWCGR0);
2498   iwmmx_mbuiltin ("getwcgr1", int_ftype_void, GETWCGR1);
2499   iwmmx_mbuiltin ("getwcgr2", int_ftype_void, GETWCGR2);
2500   iwmmx_mbuiltin ("getwcgr3", int_ftype_void, GETWCGR3);
2501 
2502   iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di, WSLLH);
2503   iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di, WSLLW);
2504   iwmmx_mbuiltin ("wslld", di_ftype_di_di, WSLLD);
2505   iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int, WSLLHI);
2506   iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int, WSLLWI);
2507   iwmmx_mbuiltin ("wslldi", di_ftype_di_int, WSLLDI);
2508 
2509   iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di, WSRLH);
2510   iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di, WSRLW);
2511   iwmmx_mbuiltin ("wsrld", di_ftype_di_di, WSRLD);
2512   iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int, WSRLHI);
2513   iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int, WSRLWI);
2514   iwmmx_mbuiltin ("wsrldi", di_ftype_di_int, WSRLDI);
2515 
2516   iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di, WSRAH);
2517   iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di, WSRAW);
2518   iwmmx_mbuiltin ("wsrad", di_ftype_di_di, WSRAD);
2519   iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int, WSRAHI);
2520   iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int, WSRAWI);
2521   iwmmx_mbuiltin ("wsradi", di_ftype_di_int, WSRADI);
2522 
2523   iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di, WRORH);
2524   iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di, WRORW);
2525   iwmmx_mbuiltin ("wrord", di_ftype_di_di, WRORD);
2526   iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int, WRORHI);
2527   iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int, WRORWI);
2528   iwmmx_mbuiltin ("wrordi", di_ftype_di_int, WRORDI);
2529 
2530   iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int, WSHUFH);
2531 
2532   iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi, WSADB);
2533   iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi, WSADH);
2534   iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi, WMADDS);
2535   iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi, WMADDSX);
2536   iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi, WMADDSN);
2537   iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi, WMADDU);
2538   iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi, WMADDUX);
2539   iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi, WMADDUN);
2540   iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi, WSADBZ);
2541   iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi, WSADHZ);
2542 
2543   iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int, TEXTRMSB);
2544   iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int, TEXTRMSH);
2545   iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int, TEXTRMSW);
2546   iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int, TEXTRMUB);
2547   iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int, TEXTRMUH);
2548   iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int, TEXTRMUW);
2549   iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int, TINSRB);
2550   iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int, TINSRH);
2551   iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int, TINSRW);
2552 
2553   iwmmx_mbuiltin ("waccb", di_ftype_v8qi, WACCB);
2554   iwmmx_mbuiltin ("wacch", di_ftype_v4hi, WACCH);
2555   iwmmx_mbuiltin ("waccw", di_ftype_v2si, WACCW);
2556 
2557   iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi, TMOVMSKB);
2558   iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi, TMOVMSKH);
2559   iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si, TMOVMSKW);
2560 
2561   iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi, WADDBHUSM);
2562   iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi, WADDBHUSL);
2563 
2564   iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi, WPACKHSS);
2565   iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi, WPACKHUS);
2566   iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si, WPACKWUS);
2567   iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si, WPACKWSS);
2568   iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di, WPACKDUS);
2569   iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di, WPACKDSS);
2570 
2571   iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi, WUNPCKEHUB);
2572   iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi, WUNPCKEHUH);
2573   iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si, WUNPCKEHUW);
2574   iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi, WUNPCKEHSB);
2575   iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi, WUNPCKEHSH);
2576   iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si, WUNPCKEHSW);
2577   iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi, WUNPCKELUB);
2578   iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi, WUNPCKELUH);
2579   iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si, WUNPCKELUW);
2580   iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi, WUNPCKELSB);
2581   iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi, WUNPCKELSH);
2582   iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si, WUNPCKELSW);
2583 
2584   iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi, WMACS);
2585   iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi, WMACSZ);
2586   iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi, WMACU);
2587   iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi, WMACUZ);
2588 
2589   iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int, WALIGNI);
2590   iwmmx_mbuiltin ("tmia", di_ftype_di_int_int, TMIA);
2591   iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int, TMIAPH);
2592   iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int, TMIABB);
2593   iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int, TMIABT);
2594   iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int, TMIATB);
2595   iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int, TMIATT);
2596 
2597   iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi, WABSB);
2598   iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi, WABSH);
2599   iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si, WABSW);
2600 
2601   iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi, WQMIABB);
2602   iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi, WQMIABT);
2603   iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi, WQMIATB);
2604   iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi, WQMIATT);
2605 
2606   iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi, WQMIABBN);
2607   iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi, WQMIABTN);
2608   iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi, WQMIATBN);
2609   iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi, WQMIATTN);
2610 
2611   iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi, WMIABB);
2612   iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi, WMIABT);
2613   iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi, WMIATB);
2614   iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi, WMIATT);
2615 
2616   iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi, WMIABBN);
2617   iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi, WMIABTN);
2618   iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi, WMIATBN);
2619   iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi, WMIATTN);
2620 
2621   iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si, WMIAWBB);
2622   iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si, WMIAWBT);
2623   iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si, WMIAWTB);
2624   iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si, WMIAWTT);
2625 
2626   iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si, WMIAWBBN);
2627   iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si, WMIAWBTN);
2628   iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si, WMIAWTBN);
2629   iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si, WMIAWTTN);
2630 
2631   iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int, WMERGE);
2632 
2633   iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char, TBCSTB);
2634   iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short, TBCSTH);
2635   iwmmx_mbuiltin ("tbcstw", v2si_ftype_int, TBCSTW);
2636 
2637 #undef iwmmx_mbuiltin
2638 #undef iwmmx2_mbuiltin
2639 }
2640 
2641 static void
2642 arm_init_fp16_builtins (void)
2643 {
2644   arm_fp16_type_node = make_node (REAL_TYPE);
2645   TYPE_PRECISION (arm_fp16_type_node) = GET_MODE_PRECISION (HFmode);
2646   layout_type (arm_fp16_type_node);
2647   if (arm_fp16_format)
2648     (*lang_hooks.types.register_builtin_type) (arm_fp16_type_node,
2649 					       "__fp16");
2650 }
2651 
2652 void
2653 arm_init_builtins (void)
2654 {
2655   if (TARGET_REALLY_IWMMXT)
2656     arm_init_iwmmxt_builtins ();
2657 
2658   /* This creates the scalar __fp16 type (arm_fp16_type_node), so it must
2659      come before arm_init_neon_builtins, which uses it.  */
2660   arm_init_fp16_builtins ();
2661 
2662   arm_init_bf16_types ();
2663 
2664   if (TARGET_MAYBE_HARD_FLOAT)
2665     {
2666       tree lane_check_fpr = build_function_type_list (void_type_node,
2667 						      intSI_type_node,
2668 						      intSI_type_node,
2669 						      NULL);
2670       arm_builtin_decls[ARM_BUILTIN_SIMD_LANE_CHECK]
2671       = add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr,
2672 			      ARM_BUILTIN_SIMD_LANE_CHECK, BUILT_IN_MD,
2673 			      NULL, NULL_TREE);
2674       if (TARGET_HAVE_MVE)
2675 	arm_init_mve_builtins ();
2676       else
2677 	arm_init_neon_builtins ();
2678       arm_init_vfp_builtins ();
2679       arm_init_crypto_builtins ();
2680     }
2681 
2682   if (TARGET_CDE)
2683     arm_init_cde_builtins ();
2684 
2685   arm_init_acle_builtins ();
2686 
2687   if (TARGET_MAYBE_HARD_FLOAT)
2688     {
2689       tree ftype_set_fpscr
2690 	= build_function_type_list (void_type_node, unsigned_type_node, NULL);
2691       tree ftype_get_fpscr
2692 	= build_function_type_list (unsigned_type_node, NULL);
2693 
2694       arm_builtin_decls[ARM_BUILTIN_GET_FPSCR]
2695 	= add_builtin_function ("__builtin_arm_get_fpscr", ftype_get_fpscr,
2696 				ARM_BUILTIN_GET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
2697       arm_builtin_decls[ARM_BUILTIN_SET_FPSCR]
2698 	= add_builtin_function ("__builtin_arm_set_fpscr", ftype_set_fpscr,
2699 				ARM_BUILTIN_SET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
2700     }
2701 
2702   if (use_cmse)
2703     {
2704       tree ftype_cmse_nonsecure_caller
2705 	= build_function_type_list (unsigned_type_node, NULL);
2706       arm_builtin_decls[ARM_BUILTIN_CMSE_NONSECURE_CALLER]
2707 	= add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
2708 				ftype_cmse_nonsecure_caller,
2709 				ARM_BUILTIN_CMSE_NONSECURE_CALLER, BUILT_IN_MD,
2710 				NULL, NULL_TREE);
2711     }
2712 }
2713 
2714 /* Return the ARM builtin for CODE.  */
2715 
2716 tree
2717 arm_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2718 {
2719   if (code >= ARM_BUILTIN_MAX)
2720     return error_mark_node;
2721 
2722   return arm_builtin_decls[code];
2723 }
2724 
2725 /* Errors in the source file can cause expand_expr to return const0_rtx
2726    where we expect a vector.  To avoid crashing, use one of the vector
2727    clear instructions.  */
2728 
2729 static rtx
2730 safe_vector_operand (rtx x, machine_mode mode)
2731 {
2732   if (x != const0_rtx)
2733     return x;
2734   x = gen_reg_rtx (mode);
2735 
2736   emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
2737 			       : gen_rtx_SUBREG (DImode, x, 0)));
2738   return x;
2739 }
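
/* This fallback is only meaningful for the iWMMXt expanders below: the
   replacement pseudo is cleared via the iwmmxt_clrdi pattern (through a
   DImode subreg when MODE is a 64-bit vector mode), giving a well-formed
   all-zero vector operand instead of const0_rtx.  */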
2740 
2741 /* Function to expand ternary builtins.  */
2742 static rtx
2743 arm_expand_ternop_builtin (enum insn_code icode,
2744                            tree exp, rtx target)
2745 {
2746   rtx pat;
2747   tree arg0 = CALL_EXPR_ARG (exp, 0);
2748   tree arg1 = CALL_EXPR_ARG (exp, 1);
2749   tree arg2 = CALL_EXPR_ARG (exp, 2);
2750 
2751   rtx op0 = expand_normal (arg0);
2752   rtx op1 = expand_normal (arg1);
2753   rtx op2 = expand_normal (arg2);
2754 
2755   machine_mode tmode = insn_data[icode].operand[0].mode;
2756   machine_mode mode0 = insn_data[icode].operand[1].mode;
2757   machine_mode mode1 = insn_data[icode].operand[2].mode;
2758   machine_mode mode2 = insn_data[icode].operand[3].mode;
2759 
2760   if (VECTOR_MODE_P (mode0))
2761     op0 = safe_vector_operand (op0, mode0);
2762   if (VECTOR_MODE_P (mode1))
2763     op1 = safe_vector_operand (op1, mode1);
2764   if (VECTOR_MODE_P (mode2))
2765     op2 = safe_vector_operand (op2, mode2);
2766 
2767   if (! target
2768       || GET_MODE (target) != tmode
2769       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2770     target = gen_reg_rtx (tmode);
2771 
2772   gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2773 	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode)
2774 	      && (GET_MODE (op2) == mode2 || GET_MODE (op2) == VOIDmode));
2775 
2776   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2777     op0 = copy_to_mode_reg (mode0, op0);
2778   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2779     op1 = copy_to_mode_reg (mode1, op1);
2780   if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2781     op2 = copy_to_mode_reg (mode2, op2);
2782 
2783   pat = GEN_FCN (icode) (target, op0, op1, op2);
2784   if (! pat)
2785     return 0;
2786   emit_insn (pat);
2787   return target;
2788 }
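
/* The binop and unop expanders below follow the same shape as the ternop
   case above: read the modes the named pattern expects, copy any operand
   that fails its predicate into a fresh register, emit the generated insn,
   and return TARGET (allocating a new pseudo when the caller's target is
   missing or unsuitable).  */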
2789 
2790 /* Subroutine of arm_expand_builtin to take care of binop insns.  */
2791 
2792 static rtx
2793 arm_expand_binop_builtin (enum insn_code icode,
2794 			  tree exp, rtx target)
2795 {
2796   rtx pat;
2797   tree arg0 = CALL_EXPR_ARG (exp, 0);
2798   tree arg1 = CALL_EXPR_ARG (exp, 1);
2799   rtx op0 = expand_normal (arg0);
2800   rtx op1 = expand_normal (arg1);
2801   machine_mode tmode = insn_data[icode].operand[0].mode;
2802   machine_mode mode0 = insn_data[icode].operand[1].mode;
2803   machine_mode mode1 = insn_data[icode].operand[2].mode;
2804 
2805   if (VECTOR_MODE_P (mode0))
2806     op0 = safe_vector_operand (op0, mode0);
2807   if (VECTOR_MODE_P (mode1))
2808     op1 = safe_vector_operand (op1, mode1);
2809 
2810   if (! target
2811       || GET_MODE (target) != tmode
2812       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2813     target = gen_reg_rtx (tmode);
2814 
2815   gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2816 	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2817 
2818   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2819     op0 = copy_to_mode_reg (mode0, op0);
2820   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2821     op1 = copy_to_mode_reg (mode1, op1);
2822 
2823   pat = GEN_FCN (icode) (target, op0, op1);
2824   if (! pat)
2825     return 0;
2826   emit_insn (pat);
2827   return target;
2828 }
2829 
2830 /* Subroutine of arm_expand_builtin to take care of unop insns.  */
2831 
2832 static rtx
2833 arm_expand_unop_builtin (enum insn_code icode,
2834 			 tree exp, rtx target, int do_load)
2835 {
2836   rtx pat;
2837   tree arg0 = CALL_EXPR_ARG (exp, 0);
2838   rtx op0 = expand_normal (arg0);
2839   machine_mode tmode = insn_data[icode].operand[0].mode;
2840   machine_mode mode0 = insn_data[icode].operand[1].mode;
2841 
2842   if (! target
2843       || GET_MODE (target) != tmode
2844       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2845     target = gen_reg_rtx (tmode);
2846   if (do_load)
2847     op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
2848   else
2849     {
2850       if (VECTOR_MODE_P (mode0))
2851 	op0 = safe_vector_operand (op0, mode0);
2852 
2853       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2854 	op0 = copy_to_mode_reg (mode0, op0);
2855     }
2856 
2857   pat = GEN_FCN (icode) (target, op0);
2858 
2859   if (! pat)
2860     return 0;
2861   emit_insn (pat);
2862   return target;
2863 }
2864 
2865 typedef enum {
2866   ARG_BUILTIN_COPY_TO_REG,
2867   ARG_BUILTIN_CONSTANT,
2868   ARG_BUILTIN_LANE_INDEX,
2869   ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX,
2870   ARG_BUILTIN_LANE_PAIR_INDEX,
2871   ARG_BUILTIN_LANE_QUADTUP_INDEX,
2872   ARG_BUILTIN_NEON_MEMORY,
2873   ARG_BUILTIN_MEMORY,
2874   ARG_BUILTIN_STOP
2875 } builtin_arg;
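
/* In brief, and as handled by arm_expand_builtin_args below: COPY_TO_REG
   operands are forced into registers, CONSTANT operands must be immediates,
   the *_INDEX kinds are range-checked lane numbers, and the MEMORY kinds are
   turned into MEM references, with ARG_BUILTIN_NEON_MEMORY additionally
   rewritten through neon_dereference_pointer / mve_dereference_pointer.  */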
2876 
2877 
2878 /* EXP is a pointer argument to a Neon load or store intrinsic.  Derive
2879    and return an expression for the accessed memory.
2880 
2881    The intrinsic function operates on a block of registers that has
2882    mode REG_MODE.  This block contains vectors of mode VECTOR_MODE.  The
2883    function references the memory at EXP of type TYPE and in mode
2884    MEM_MODE; this mode may be BLKmode if no more suitable mode is
2885    available.  */
2886 
2887 static tree
2888 neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
2889 			  machine_mode reg_mode,
2890 			  machine_mode vector_mode)
2891 {
2892   HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
2893   tree elem_type, upper_bound, array_type;
2894 
2895   /* Work out the size of the register block in bytes.  */
2896   reg_size = GET_MODE_SIZE (reg_mode);
2897 
2898   /* Work out the size of each vector in bytes.  */
2899   vector_size = GET_MODE_SIZE (vector_mode);
2900 
2901   /* Work out how many vectors there are.  */
2902   gcc_assert (reg_size % vector_size == 0);
2903   nvectors = reg_size / vector_size;
2904 
2905   /* Work out the type of each element.  */
2906   gcc_assert (POINTER_TYPE_P (type));
2907   elem_type = TREE_TYPE (type);
2908 
2909   /* Work out how many elements are being loaded or stored.
2910      MEM_MODE == REG_MODE implies a one-to-one mapping between register
2911      and memory elements; anything else implies a lane load or store.  */
2912   if (mem_mode == reg_mode)
2913     nelems = vector_size * nvectors / int_size_in_bytes (elem_type);
2914   else
2915     nelems = nvectors;
2916 
2917   /* Create a type that describes the full access.  */
2918   upper_bound = build_int_cst (size_type_node, nelems - 1);
2919   array_type = build_array_type (elem_type, build_index_type (upper_bound));
2920 
2921   /* Dereference EXP using that type.  */
2922   return fold_build2 (MEM_REF, array_type, exp,
2923 		      build_int_cst (build_pointer_type (array_type), 0));
2924 }
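
/* As a worked illustration of the arithmetic above (the modes are chosen
   for the example only): for a full two-vector load of 32-bit elements,
   REG_MODE could be a 32-byte tuple mode and VECTOR_MODE V4SImode
   (16 bytes), so nvectors is 2; with MEM_MODE == REG_MODE and 4-byte
   elements the access is an array of 32 / 4 = 8 elements, whereas a lane
   load or store (MEM_MODE != REG_MODE) accesses nvectors = 2 elements.  */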
2925 
2926 /* EXP is a pointer argument to a vector scatter store intrinsic.
2927 
2928    Consider the following example:
2929 	VSTRW<v>.<dt> Qd, [Qm{, #+/-<imm>}]!
2930    When <Qm> is used as the base register for the target address,
2931    this function is used to derive and return an expression for the
2932    accessed memory.
2933 
2934    The intrinsic function operates on a block of registers that has mode
2935    REG_MODE.  This block contains vectors of mode VECTOR_MODE.  The
2936    function references the memory at EXP of type TYPE; the size of the
2937    access is derived from REG_MODE and VECTOR_MODE below.  */
2938 
2939 static tree
2940 mve_dereference_pointer (tree exp, tree type, machine_mode reg_mode,
2941 			 machine_mode vector_mode)
2942 {
2943   HOST_WIDE_INT reg_size, vector_size, nelems;
2944   tree elem_type, upper_bound, array_type;
2945 
2946   /* Work out the size of each vector in bytes.  */
2947   vector_size = GET_MODE_SIZE (vector_mode);
2948 
2949   /* Work out the size of the register block in bytes.  */
2950   reg_size = GET_MODE_SIZE (reg_mode);
2951 
2952   /* Work out the type of each element.  */
2953   gcc_assert (POINTER_TYPE_P (type));
2954   elem_type = TREE_TYPE (type);
2955 
2956   nelems = reg_size / vector_size;
2957 
2958   /* Create a type that describes the full access.  */
2959   upper_bound = build_int_cst (size_type_node, nelems - 1);
2960   array_type = build_array_type (elem_type, build_index_type (upper_bound));
2961 
2962   /* Dereference EXP using that type.  */
2963   return fold_build2 (MEM_REF, array_type, exp,
2964 		      build_int_cst (build_pointer_type (array_type), 0));
2965 }
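
/* For illustration: when the register block is a single MVE vector,
   REG_MODE equals VECTOR_MODE (e.g. both V4SImode), so nelems is 1 and
   the access is described as a one-element array of the pointed-to
   type.  */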
2966 
2967 /* Subroutine of arm_expand_builtin_1 to expand a builtin's arguments as
   described by ARGS and emit the instruction pattern ICODE.  */
2968 static rtx
2969 arm_expand_builtin_args (rtx target, machine_mode map_mode, int fcode,
2970 		      int icode, int have_retval, tree exp,
2971 		      builtin_arg *args)
2972 {
2973   rtx pat;
2974   tree arg[SIMD_MAX_BUILTIN_ARGS];
2975   rtx op[SIMD_MAX_BUILTIN_ARGS];
2976   machine_mode tmode = insn_data[icode].operand[0].mode;
2977   machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
2978   tree formals;
2979   int argc = 0;
2980   rtx_insn * insn;
2981 
2982   if (have_retval
2983       && (!target
2984 	  || GET_MODE (target) != tmode
2985 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
2986     target = gen_reg_rtx (tmode);
2987 
2988   formals = TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls[fcode]));
2989 
2990   for (;;)
2991     {
2992       builtin_arg thisarg = args[argc];
2993 
2994       if (thisarg == ARG_BUILTIN_STOP)
2995 	break;
2996       else
2997 	{
2998 	  int opno = argc + have_retval;
2999 	  arg[argc] = CALL_EXPR_ARG (exp, argc);
3000 	  mode[argc] = insn_data[icode].operand[opno].mode;
3001 	  if (thisarg == ARG_BUILTIN_NEON_MEMORY)
3002             {
3003               machine_mode other_mode
3004 		= insn_data[icode].operand[1 - opno].mode;
3005 	      if (TARGET_HAVE_MVE && mode[argc] != other_mode)
3006 		{
3007 		  arg[argc] = mve_dereference_pointer (arg[argc],
3008 						    TREE_VALUE (formals),
3009 						    other_mode, map_mode);
3010 		}
3011 	      else
3012 		arg[argc] = neon_dereference_pointer (arg[argc],
3013 						      TREE_VALUE (formals),
3014 						      mode[argc], other_mode,
3015 						      map_mode);
3016             }
3017 
3018 	  /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
3019 	     ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P is returned.  */
3020 	  op[argc] = expand_expr (arg[argc], NULL_RTX, VOIDmode,
3021 				  ((thisarg == ARG_BUILTIN_MEMORY
3022 				    || thisarg == ARG_BUILTIN_NEON_MEMORY)
3023 				   ? EXPAND_MEMORY : EXPAND_NORMAL));
3024 
3025 	  switch (thisarg)
3026 	    {
3027 	    case ARG_BUILTIN_MEMORY:
3028 	    case ARG_BUILTIN_COPY_TO_REG:
3029 	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
3030 		op[argc] = convert_memory_address (Pmode, op[argc]);
3031 	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
3032 	      if (!(*insn_data[icode].operand[opno].predicate)
3033 		  (op[argc], mode[argc]))
3034 		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
3035 	      break;
3036 
3037 	    case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX:
3038 	      gcc_assert (argc > 1);
3039 	      if (CONST_INT_P (op[argc]))
3040 		{
3041 		  neon_lane_bounds (op[argc], 0,
3042 				    GET_MODE_NUNITS (map_mode), exp);
3043 		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
3044 		  op[argc] =
3045 		    GEN_INT (NEON_ENDIAN_LANE_N (map_mode, INTVAL (op[argc])));
3046 		}
3047 	      goto constant_arg;
3048 
3049 	    case ARG_BUILTIN_LANE_INDEX:
3050 	      /* Previous argument must be a vector, which this indexes.  */
3051 	      gcc_assert (argc > 0);
3052 	      if (CONST_INT_P (op[argc]))
3053 		{
3054 		  machine_mode vmode = mode[argc - 1];
3055 		  neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode), exp);
3056 		}
3057 	      /* If the lane index isn't a constant then error out.  */
3058 	      goto constant_arg;
3059 
3060 	    case ARG_BUILTIN_LANE_PAIR_INDEX:
3061 	      /* Previous argument must be a vector, which this indexes. The
3062 		 indexing will always select i and i+1 out of the vector, which
3063 		 puts a limit on i.  */
3064 	      gcc_assert (argc > 0);
3065 	      if (CONST_INT_P (op[argc]))
3066 		{
3067 		  machine_mode vmode = mode[argc - 1];
3068 		  neon_lane_bounds (op[argc], 0,
3069 				    GET_MODE_NUNITS (vmode) / 2, exp);
3070 		}
3071 	      /* If the lane index isn't a constant then error out.  */
3072 	      goto constant_arg;
3073 
3074 	    case ARG_BUILTIN_LANE_QUADTUP_INDEX:
3075 	      /* Previous argument must be a vector, which this indexes.  */
3076 	      gcc_assert (argc > 0);
3077 	      if (CONST_INT_P (op[argc]))
3078 		{
3079 		  machine_mode vmode = mode[argc - 1];
3080 		  neon_lane_bounds (op[argc], 0,
3081 				    GET_MODE_NUNITS (vmode) / 4, exp);
3082 		}
3083 	      /* If the lane index isn't a constant then error out.  */
3084 	      goto constant_arg;
3085 
3086 	    case ARG_BUILTIN_CONSTANT:
3087 constant_arg:
3088 	      if (!(*insn_data[icode].operand[opno].predicate)
3089 		  (op[argc], mode[argc]))
3090 		{
3091 		  if (IN_RANGE (fcode, ARM_BUILTIN_CDE_PATTERN_START,
3092 				ARM_BUILTIN_CDE_PATTERN_END))
3093 		    {
3094 		      if (argc == 0)
3095 			{
3096 			  unsigned int cp_bit = (CONST_INT_P (op[argc])
3097 						 ? UINTVAL (op[argc]) : -1);
3098 			  if (IN_RANGE (cp_bit, 0, ARM_CDE_CONST_COPROC))
3099 			    error ("%Kcoprocessor %d is not enabled "
3100 				   "with +cdecp%d", exp, cp_bit, cp_bit);
3101 			  else
3102 			    error ("%Kcoproc must be a constant immediate in "
3103 				   "range [0-%d] enabled with +cdecp<N>", exp,
3104 				   ARM_CDE_CONST_COPROC);
3105 			}
3106 		      else
3107 			/* Here we mention the builtin name to follow the same
3108 			   format that the C/C++ frontends use for referencing
3109 			   a given argument index.  */
3110 			error ("%Kargument %d to %qE must be a constant immediate "
3111 			       "in range [0-%d]", exp, argc + 1,
3112 			       arm_builtin_decls[fcode],
3113 			       cde_builtin_data[fcode -
3114 			       ARM_BUILTIN_CDE_PATTERN_START].imm_max);
3115 		    }
3116 		  else
3117 		    error ("%Kargument %d must be a constant immediate",
3118 			   exp, argc + 1);
3119 		  /* We have failed to expand the pattern and are now
3120 		     generating invalid code.  But the mid-end will still
3121 		     try to build an assignment for this node while it
3122 		     expands, before stopping for the error, so just pass
3123 		     back TARGET to ensure a valid assignment.  */
3124 		  return target;
3125 		}
3126 	      break;
3127 
3128 	      case ARG_BUILTIN_NEON_MEMORY:
3129 	      /* Check if expand failed.  */
3130 	      if (op[argc] == const0_rtx)
3131 		return 0;
3132 	      gcc_assert (MEM_P (op[argc]));
3133 	      PUT_MODE (op[argc], mode[argc]);
3134 	      /* ??? arm_neon.h uses the same built-in functions for signed
3135 		 and unsigned accesses, casting where necessary.  This isn't
3136 		 alias safe.  */
3137 	      set_mem_alias_set (op[argc], 0);
3138 	      if (!(*insn_data[icode].operand[opno].predicate)
3139                    (op[argc], mode[argc]))
3140 		op[argc] = (replace_equiv_address
3141 			    (op[argc],
3142 			     copy_to_mode_reg (Pmode, XEXP (op[argc], 0))));
3143               break;
3144 
3145 	    case ARG_BUILTIN_STOP:
3146 	      gcc_unreachable ();
3147 	    }
3148 
3149 	  argc++;
3150 	}
3151     }
3152 
3153   if (have_retval)
3154     switch (argc)
3155       {
3156       case 0:
3157 	pat = GEN_FCN (icode) (target);
3158 	break;
3159       case 1:
3160 	pat = GEN_FCN (icode) (target, op[0]);
3161 	break;
3162 
3163       case 2:
3164 	pat = GEN_FCN (icode) (target, op[0], op[1]);
3165 	break;
3166 
3167       case 3:
3168 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
3169 	break;
3170 
3171       case 4:
3172 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
3173 	break;
3174 
3175       case 5:
3176 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
3177 	break;
3178 
3179       case 6:
3180 	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
3181 	break;
3182 
3183       default:
3184 	gcc_unreachable ();
3185       }
3186   else
3187     switch (argc)
3188       {
3189       case 1:
3190 	pat = GEN_FCN (icode) (op[0]);
3191 	break;
3192 
3193       case 2:
3194 	pat = GEN_FCN (icode) (op[0], op[1]);
3195 	break;
3196 
3197       case 3:
3198 	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
3199 	break;
3200 
3201       case 4:
3202 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
3203 	break;
3204 
3205       case 5:
3206 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
3207 	break;
3208 
3209       case 6:
3210 	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
3211 	break;
3212 
3213       default:
3214 	gcc_unreachable ();
3215       }
3216 
3217   if (!pat)
3218     return 0;
3219 
3220   /* Check whether our current target implements the pattern chosen for this
3221      builtin and error out if not.  */
3222   start_sequence ();
3223   emit_insn (pat);
3224   insn = get_insns ();
3225   end_sequence ();
3226 
3227   if (recog_memoized (insn) < 0)
3228     error ("this builtin is not supported for this target");
3229   else
3230     emit_insn (insn);
3231 
3232   return target;
3233 }
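
/* As a sketch of the common case: a two-input arithmetic builtin is
   expanded with ARGS = { ARG_BUILTIN_COPY_TO_REG, ARG_BUILTIN_COPY_TO_REG,
   ARG_BUILTIN_STOP } and HAVE_RETVAL set, so the loop above forces both
   operands into registers of the modes required by ICODE and the
   argc == 2 case emits GEN_FCN (icode) (target, op[0], op[1]).  */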
3234 
3235 /* Expand a builtin.  These builtins are "special" because they don't have
3236    symbolic constants defined per-instruction or per instruction-variant.
3237    Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
3238    is passed into the function.  */
3239 
3240 static rtx
3241 arm_expand_builtin_1 (int fcode, tree exp, rtx target,
3242 			   arm_builtin_datum *d)
3243 {
3244   enum insn_code icode = d->code;
3245   builtin_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
3246   int num_args = insn_data[d->code].n_operands;
3247   int is_void = 0;
3248   int k;
3249   bool neon = false;
3250   bool mve = false;
3251 
3252   if (IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
3253     neon = true;
3254 
3255   if (IN_RANGE (fcode, ARM_BUILTIN_MVE_BASE, ARM_BUILTIN_MAX - 1))
3256     mve = true;
3257 
3258   is_void = !!(d->qualifiers[0] & qualifier_void);
3259 
3260   num_args += is_void;
3261 
3262   for (k = 1; k < num_args; k++)
3263     {
3264       /* We have four arrays of data, each indexed in a different fashion.
3265 	 qualifiers - element 0 always describes the function return type.
3266 	 operands - element 0 is either the operand for return value (if
3267 	 the function has a non-void return type) or the operand for the
3268 	 first argument.
3269 	 expr_args - element 0 always holds the first argument.
3270 	 args - element 0 is always used for the return type.  */
3271       int qualifiers_k = k;
3272       int operands_k = k - is_void;
3273       int expr_args_k = k - 1;
3274 
3275       if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
3276 	args[k] = ARG_BUILTIN_LANE_INDEX;
3277       else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
3278 	args[k] = ARG_BUILTIN_LANE_PAIR_INDEX;
3279       else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
3280 	args[k] = ARG_BUILTIN_LANE_QUADTUP_INDEX;
3281       else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
3282 	args[k] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX;
3283       else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
3284 	args[k] = ARG_BUILTIN_CONSTANT;
3285       else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
3286 	{
3287 	  rtx arg
3288 	    = expand_normal (CALL_EXPR_ARG (exp,
3289 					    (expr_args_k)));
3290 	  /* Handle constants only if the predicate allows it.  */
3291 	  bool op_const_int_p =
3292 	    (CONST_INT_P (arg)
3293 	     && (*insn_data[icode].operand[operands_k].predicate)
3294 	     (arg, insn_data[icode].operand[operands_k].mode));
3295 	  args[k] = op_const_int_p ? ARG_BUILTIN_CONSTANT : ARG_BUILTIN_COPY_TO_REG;
3296 	}
3297       else if (d->qualifiers[qualifiers_k] & qualifier_pointer)
3298 	{
3299 	  if (neon || mve)
3300 	    args[k] = ARG_BUILTIN_NEON_MEMORY;
3301 	  else
3302 	    args[k] = ARG_BUILTIN_MEMORY;
3303 	}
3304       else
3305 	args[k] = ARG_BUILTIN_COPY_TO_REG;
3306     }
3307   args[k] = ARG_BUILTIN_STOP;
3308 
3309   /* The interface to arm_expand_builtin_args expects a 0 if
3310      the function is void, and a 1 if it is not.  */
3311   return arm_expand_builtin_args
3312     (target, d->mode, fcode, icode, !is_void, exp,
3313      &args[1]);
3314 }
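
/* Example of the index mapping used in the loop above: for a void store
   builtin whose pattern has two operands, num_args becomes 3; at k == 1
   the qualifier checked is d->qualifiers[1], the insn operand is
   operand 0 and the call argument is CALL_EXPR_ARG (exp, 0).  For a
   non-void builtin the insn operand index is simply k, since operand 0
   holds the result.  */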
3315 
3316 /* Expand an ACLE builtin, i.e. those registered only if their respective
3317    target constraints are met.  This check happens within
3318    arm_expand_builtin_args.  */
3319 
3320 static rtx
3321 arm_expand_acle_builtin (int fcode, tree exp, rtx target)
3322 {
3323   if (fcode == ARM_BUILTIN_SAT_IMM_CHECK)
3324     {
3325       /* Check the saturation immediate bounds.  */
3326 
3327       rtx min_sat = expand_normal (CALL_EXPR_ARG (exp, 1));
3328       rtx max_sat = expand_normal (CALL_EXPR_ARG (exp, 2));
3329       gcc_assert (CONST_INT_P (min_sat));
3330       gcc_assert (CONST_INT_P (max_sat));
3331       rtx sat_imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3332       if (CONST_INT_P (sat_imm))
3333 	{
3334 	  if (!IN_RANGE (UINTVAL (sat_imm), UINTVAL (min_sat), UINTVAL (max_sat)))
3335 	    error ("%Ksaturation bit range must be in the range [%wd, %wd]",
3336 		   exp, UINTVAL (min_sat), UINTVAL (max_sat));
3337 	}
3338       else
3339 	error ("%Ksaturation bit range must be a constant immediate", exp);
3340       /* Don't generate any RTL.  */
3341       return const0_rtx;
3342     }
3343 
3344   gcc_assert (fcode != ARM_BUILTIN_CDE_BASE);
3345   arm_builtin_datum *d
3346     = (fcode < ARM_BUILTIN_CDE_BASE)
3347       ? &acle_builtin_data[fcode - ARM_BUILTIN_ACLE_PATTERN_START]
3348       : &cde_builtin_data[fcode - ARM_BUILTIN_CDE_PATTERN_START].base;
3349 
3350   return arm_expand_builtin_1 (fcode, exp, target, d);
3351 }
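
/* A sketch of the intended use of ARM_BUILTIN_SAT_IMM_CHECK (illustrative
   only, not taken from the intrinsic headers): a saturation intrinsic
   wrapper passes the user's bit-position immediate together with its legal
   bounds, e.g. 1 and 32 for an SSAT-style operation, so that the range
   check and diagnostics above run at expansion time before the underlying
   saturation builtin is expanded.  */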
3352 
3353 /* Expand an MVE builtin, i.e. those registered only if their respective target
3354    constraints are met.  This check happens within arm_expand_builtin.  */
3355 
3356 static rtx
3357 arm_expand_mve_builtin (int fcode, tree exp, rtx target)
3358 {
3359   if (fcode >= ARM_BUILTIN_MVE_BASE && !TARGET_HAVE_MVE)
3360     {
3361       fatal_error (input_location,
3362 		   "You must enable MVE instructions"
3363 		   " to use these intrinsics");
3364       return const0_rtx;
3365     }
3366 
3367   arm_builtin_datum *d
3368     = &mve_builtin_data[fcode - ARM_BUILTIN_MVE_PATTERN_START];
3369 
3370   return arm_expand_builtin_1 (fcode, exp, target, d);
3371 }
3372 
3373 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
3374    Most of these are "special" because they don't have symbolic
3375    constants defined per-instruction or per instruction-variant.  Instead, the
3376    required info is looked up in the table neon_builtin_data.  */
3377 
3378 static rtx
3379 arm_expand_neon_builtin (int fcode, tree exp, rtx target)
3380 {
3381   if (fcode >= ARM_BUILTIN_NEON_BASE && ! TARGET_NEON)
3382     {
3383       fatal_error (input_location,
3384 		   "You must enable NEON instructions"
3385 		   " (e.g. %<-mfloat-abi=softfp%> %<-mfpu=neon%>)"
3386 		   " to use these intrinsics.");
3387       return const0_rtx;
3388     }
3389 
3390   arm_builtin_datum *d
3391     = &neon_builtin_data[fcode - ARM_BUILTIN_NEON_PATTERN_START];
3392 
3393   return arm_expand_builtin_1 (fcode, exp, target, d);
3394 }
3395 
3396 /* Expand a VFP builtin.  These builtins are treated like
3397    neon builtins except that the data is looked up in table
3398    VFP_BUILTIN_DATA.  */
3399 
3400 static rtx
3401 arm_expand_vfp_builtin (int fcode, tree exp, rtx target)
3402 {
3403   if (fcode >= ARM_BUILTIN_VFP_BASE && ! TARGET_HARD_FLOAT)
3404     {
3405       fatal_error (input_location,
3406 		   "You must enable VFP instructions"
3407 		   " to use these intrinsics.");
3408       return const0_rtx;
3409     }
3410 
3411   arm_builtin_datum *d
3412     = &vfp_builtin_data[fcode - ARM_BUILTIN_VFP_PATTERN_START];
3413 
3414   return arm_expand_builtin_1 (fcode, exp, target, d);
3415 }
3416 
3417 /* Expand an expression EXP that calls a built-in function,
3418    with result going to TARGET if that's convenient
3419    (and in mode MODE if that's convenient).
3420    SUBTARGET may be used as the target for computing one of EXP's operands.
3421    IGNORE is nonzero if the value is to be ignored.  */
3422 
3423 rtx
3424 arm_expand_builtin (tree exp,
3425 		    rtx target,
3426 		    rtx subtarget ATTRIBUTE_UNUSED,
3427 		    machine_mode mode ATTRIBUTE_UNUSED,
3428 		    int ignore ATTRIBUTE_UNUSED)
3429 {
3430   const struct builtin_description * d;
3431   enum insn_code    icode;
3432   tree              fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3433   tree              arg0;
3434   tree              arg1;
3435   tree              arg2;
3436   rtx               op0;
3437   rtx               op1;
3438   rtx               op2;
3439   rtx               pat;
3440   unsigned int      fcode = DECL_MD_FUNCTION_CODE (fndecl);
3441   size_t            i;
3442   machine_mode tmode;
3443   machine_mode mode0;
3444   machine_mode mode1;
3445   machine_mode mode2;
3446   int opint;
3447   int selector;
3448   int mask;
3449   int imm;
3450 
3451   if (fcode == ARM_BUILTIN_SIMD_LANE_CHECK)
3452     {
3453       /* This builtin only checks the bounds of the lane passed to some
3454 	 intrinsics implemented with GCC vector extensions in arm_neon.h.  */
3455 
3456       tree nlanes = CALL_EXPR_ARG (exp, 0);
3457       gcc_assert (TREE_CODE (nlanes) == INTEGER_CST);
3458       rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 1));
3459       if (CONST_INT_P (lane_idx))
3460 	neon_lane_bounds (lane_idx, 0, TREE_INT_CST_LOW (nlanes), exp);
3461       else
3462 	error ("%Klane index must be a constant immediate", exp);
3463       /* Don't generate any RTL.  */
3464       return const0_rtx;
3465     }
3466   if (fcode >= ARM_BUILTIN_MVE_BASE)
3467     return arm_expand_mve_builtin (fcode, exp, target);
3468 
3469   if (fcode >= ARM_BUILTIN_ACLE_BASE)
3470     return arm_expand_acle_builtin (fcode, exp, target);
3471 
3472   if (fcode >= ARM_BUILTIN_NEON_BASE)
3473     return arm_expand_neon_builtin (fcode, exp, target);
3474 
3475   if (fcode >= ARM_BUILTIN_VFP_BASE)
3476     return arm_expand_vfp_builtin (fcode, exp, target);
3477 
3478   /* Check in the context of the function making the call whether the
3479      builtin is supported.  */
3480   if (fcode >= ARM_BUILTIN_CRYPTO_BASE
3481       && (!TARGET_CRYPTO || !TARGET_HARD_FLOAT))
3482     {
3483       fatal_error (input_location,
3484 		   "You must enable crypto instructions"
3485 		   " (e.g. include %<-mfloat-abi=softfp%> "
3486 		   "%<-mfpu=crypto-neon%>)"
3487 		   " to use these intrinsics.");
3488       return const0_rtx;
3489     }
3490 
3491   switch (fcode)
3492     {
3493     case ARM_BUILTIN_GET_FPSCR_NZCVQC:
3494     case ARM_BUILTIN_SET_FPSCR_NZCVQC:
3495       if (fcode == ARM_BUILTIN_GET_FPSCR_NZCVQC)
3496 	{
3497 	  icode = CODE_FOR_get_fpscr_nzcvqc;
3498 	  target = gen_reg_rtx (SImode);
3499 	  emit_insn (GEN_FCN (icode) (target));
3500 	  return target;
3501 	}
3502       else
3503 	{
3504 	  icode = CODE_FOR_set_fpscr_nzcvqc;
3505 	  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
3506 	  emit_insn (GEN_FCN (icode) (force_reg (SImode, op0)));
3507 	  return NULL_RTX;
3508 	}
3509 
3510     case ARM_BUILTIN_GET_FPSCR:
3511     case ARM_BUILTIN_SET_FPSCR:
3512       if (fcode == ARM_BUILTIN_GET_FPSCR)
3513 	{
3514 	  icode = CODE_FOR_get_fpscr;
3515 	  target = gen_reg_rtx (SImode);
3516 	  pat = GEN_FCN (icode) (target);
3517 	}
3518       else
3519 	{
3520 	  target = NULL_RTX;
3521 	  icode = CODE_FOR_set_fpscr;
3522 	  arg0 = CALL_EXPR_ARG (exp, 0);
3523 	  op0 = expand_normal (arg0);
3524 	  pat = GEN_FCN (icode) (force_reg (SImode, op0));
3525 	}
3526       emit_insn (pat);
3527       return target;
3528 
3529     case ARM_BUILTIN_CMSE_NONSECURE_CALLER:
3530       target = gen_reg_rtx (SImode);
3531       op0 = arm_return_addr (0, NULL_RTX);
3532       emit_insn (gen_andsi3 (target, op0, const1_rtx));
3533       op1 = gen_rtx_EQ (SImode, target, const0_rtx);
3534       emit_insn (gen_cstoresi4 (target, op1, target, const0_rtx));
3535       return target;
3536 
3537     case ARM_BUILTIN_TEXTRMSB:
3538     case ARM_BUILTIN_TEXTRMUB:
3539     case ARM_BUILTIN_TEXTRMSH:
3540     case ARM_BUILTIN_TEXTRMUH:
3541     case ARM_BUILTIN_TEXTRMSW:
3542     case ARM_BUILTIN_TEXTRMUW:
3543       icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
3544 	       : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
3545 	       : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
3546 	       : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
3547 	       : CODE_FOR_iwmmxt_textrmw);
3548 
3549       arg0 = CALL_EXPR_ARG (exp, 0);
3550       arg1 = CALL_EXPR_ARG (exp, 1);
3551       op0 = expand_normal (arg0);
3552       op1 = expand_normal (arg1);
3553       tmode = insn_data[icode].operand[0].mode;
3554       mode0 = insn_data[icode].operand[1].mode;
3555       mode1 = insn_data[icode].operand[2].mode;
3556 
3557       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3558 	op0 = copy_to_mode_reg (mode0, op0);
3559       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3560 	{
3561 	  /* @@@ better error message */
3562 	  error ("selector must be an immediate");
3563 	  return gen_reg_rtx (tmode);
3564 	}
3565 
3566       opint = INTVAL (op1);
3567       if (fcode == ARM_BUILTIN_TEXTRMSB || fcode == ARM_BUILTIN_TEXTRMUB)
3568 	{
3569 	  if (opint > 7 || opint < 0)
3570 	    error ("the range of selector should be in 0 to 7");
3571 	}
3572       else if (fcode == ARM_BUILTIN_TEXTRMSH || fcode == ARM_BUILTIN_TEXTRMUH)
3573 	{
3574 	  if (opint > 3 || opint < 0)
3575 	    error ("the range of selector should be in 0 to 3");
3576 	}
3577       else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW.  */
3578 	{
3579 	  if (opint > 1 || opint < 0)
3580 	    error ("the range of selector should be in 0 to 1");
3581 	}
3582 
3583       if (target == 0
3584 	  || GET_MODE (target) != tmode
3585 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3586 	target = gen_reg_rtx (tmode);
3587       pat = GEN_FCN (icode) (target, op0, op1);
3588       if (! pat)
3589 	return 0;
3590       emit_insn (pat);
3591       return target;
3592 
3593     case ARM_BUILTIN_WALIGNI:
3594       /* If op2 is an immediate, use waligni, else use walignr.  */
3595       arg0 = CALL_EXPR_ARG (exp, 0);
3596       arg1 = CALL_EXPR_ARG (exp, 1);
3597       arg2 = CALL_EXPR_ARG (exp, 2);
3598       op0 = expand_normal (arg0);
3599       op1 = expand_normal (arg1);
3600       op2 = expand_normal (arg2);
3601       if (CONST_INT_P (op2))
3602         {
3603 	  icode = CODE_FOR_iwmmxt_waligni;
3604           tmode = insn_data[icode].operand[0].mode;
3605 	  mode0 = insn_data[icode].operand[1].mode;
3606 	  mode1 = insn_data[icode].operand[2].mode;
3607 	  mode2 = insn_data[icode].operand[3].mode;
3608           if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3609 	    op0 = copy_to_mode_reg (mode0, op0);
3610           if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3611 	    op1 = copy_to_mode_reg (mode1, op1);
3612           gcc_assert ((*insn_data[icode].operand[3].predicate) (op2, mode2));
3613 	  selector = INTVAL (op2);
3614 	  if (selector > 7 || selector < 0)
3615 	    error ("the range of selector should be in 0 to 7");
3616 	}
3617       else
3618         {
3619 	  icode = CODE_FOR_iwmmxt_walignr;
3620           tmode = insn_data[icode].operand[0].mode;
3621 	  mode0 = insn_data[icode].operand[1].mode;
3622 	  mode1 = insn_data[icode].operand[2].mode;
3623 	  mode2 = insn_data[icode].operand[3].mode;
3624           if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3625 	    op0 = copy_to_mode_reg (mode0, op0);
3626           if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3627 	    op1 = copy_to_mode_reg (mode1, op1);
3628           if (!(*insn_data[icode].operand[3].predicate) (op2, mode2))
3629 	    op2 = copy_to_mode_reg (mode2, op2);
3630 	}
3631       if (target == 0
3632 	  || GET_MODE (target) != tmode
3633 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3634 	target = gen_reg_rtx (tmode);
3635       pat = GEN_FCN (icode) (target, op0, op1, op2);
3636       if (!pat)
3637 	return 0;
3638       emit_insn (pat);
3639       return target;
3640 
3641     case ARM_BUILTIN_TINSRB:
3642     case ARM_BUILTIN_TINSRH:
3643     case ARM_BUILTIN_TINSRW:
3644     case ARM_BUILTIN_WMERGE:
3645       icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
3646 	       : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
3647 	       : fcode == ARM_BUILTIN_WMERGE ? CODE_FOR_iwmmxt_wmerge
3648 	       : CODE_FOR_iwmmxt_tinsrw);
3649       arg0 = CALL_EXPR_ARG (exp, 0);
3650       arg1 = CALL_EXPR_ARG (exp, 1);
3651       arg2 = CALL_EXPR_ARG (exp, 2);
3652       op0 = expand_normal (arg0);
3653       op1 = expand_normal (arg1);
3654       op2 = expand_normal (arg2);
3655       tmode = insn_data[icode].operand[0].mode;
3656       mode0 = insn_data[icode].operand[1].mode;
3657       mode1 = insn_data[icode].operand[2].mode;
3658       mode2 = insn_data[icode].operand[3].mode;
3659 
3660       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3661 	op0 = copy_to_mode_reg (mode0, op0);
3662       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3663 	op1 = copy_to_mode_reg (mode1, op1);
3664       if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3665 	{
3666 	  error ("selector must be an immediate");
3667 	  return const0_rtx;
3668 	}
3669       if (icode == CODE_FOR_iwmmxt_wmerge)
3670 	{
3671 	  selector = INTVAL (op2);
3672 	  if (selector > 7 || selector < 0)
3673 	    error ("the range of selector should be in 0 to 7");
3674 	}
3675       if ((icode == CODE_FOR_iwmmxt_tinsrb)
3676 	  || (icode == CODE_FOR_iwmmxt_tinsrh)
3677 	  || (icode == CODE_FOR_iwmmxt_tinsrw))
3678         {
3679 	  mask = 0x01;
3680 	  selector = INTVAL (op2);
3681 	  if (icode == CODE_FOR_iwmmxt_tinsrb && (selector < 0 || selector > 7))
3682 	    error ("the range of selector should be in 0 to 7");
3683 	  else if (icode == CODE_FOR_iwmmxt_tinsrh && (selector < 0 || selector > 3))
3684 	    error ("the range of selector should be in 0 to 3");
3685 	  else if (icode == CODE_FOR_iwmmxt_tinsrw && (selector < 0 || selector > 1))
3686 	    error ("the range of selector should be in 0 to 1");
3687 	  mask <<= selector;
3688 	  op2 = GEN_INT (mask);
3689 	}
3690       if (target == 0
3691 	  || GET_MODE (target) != tmode
3692 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3693 	target = gen_reg_rtx (tmode);
3694       pat = GEN_FCN (icode) (target, op0, op1, op2);
3695       if (! pat)
3696 	return 0;
3697       emit_insn (pat);
3698       return target;
3699 
3700     case ARM_BUILTIN_SETWCGR0:
3701     case ARM_BUILTIN_SETWCGR1:
3702     case ARM_BUILTIN_SETWCGR2:
3703     case ARM_BUILTIN_SETWCGR3:
3704       icode = (fcode == ARM_BUILTIN_SETWCGR0 ? CODE_FOR_iwmmxt_setwcgr0
3705 	       : fcode == ARM_BUILTIN_SETWCGR1 ? CODE_FOR_iwmmxt_setwcgr1
3706 	       : fcode == ARM_BUILTIN_SETWCGR2 ? CODE_FOR_iwmmxt_setwcgr2
3707 	       : CODE_FOR_iwmmxt_setwcgr3);
3708       arg0 = CALL_EXPR_ARG (exp, 0);
3709       op0 = expand_normal (arg0);
3710       mode0 = insn_data[icode].operand[0].mode;
3711       if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
3712         op0 = copy_to_mode_reg (mode0, op0);
3713       pat = GEN_FCN (icode) (op0);
3714       if (!pat)
3715 	return 0;
3716       emit_insn (pat);
3717       return 0;
3718 
3719     case ARM_BUILTIN_GETWCGR0:
3720     case ARM_BUILTIN_GETWCGR1:
3721     case ARM_BUILTIN_GETWCGR2:
3722     case ARM_BUILTIN_GETWCGR3:
3723       icode = (fcode == ARM_BUILTIN_GETWCGR0 ? CODE_FOR_iwmmxt_getwcgr0
3724 	       : fcode == ARM_BUILTIN_GETWCGR1 ? CODE_FOR_iwmmxt_getwcgr1
3725 	       : fcode == ARM_BUILTIN_GETWCGR2 ? CODE_FOR_iwmmxt_getwcgr2
3726 	       : CODE_FOR_iwmmxt_getwcgr3);
3727       tmode = insn_data[icode].operand[0].mode;
3728       if (target == 0
3729 	  || GET_MODE (target) != tmode
3730 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3731         target = gen_reg_rtx (tmode);
3732       pat = GEN_FCN (icode) (target);
3733       if (!pat)
3734         return 0;
3735       emit_insn (pat);
3736       return target;
3737 
3738     case ARM_BUILTIN_WSHUFH:
3739       icode = CODE_FOR_iwmmxt_wshufh;
3740       arg0 = CALL_EXPR_ARG (exp, 0);
3741       arg1 = CALL_EXPR_ARG (exp, 1);
3742       op0 = expand_normal (arg0);
3743       op1 = expand_normal (arg1);
3744       tmode = insn_data[icode].operand[0].mode;
3745       mode1 = insn_data[icode].operand[1].mode;
3746       mode2 = insn_data[icode].operand[2].mode;
3747 
3748       if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
3749 	op0 = copy_to_mode_reg (mode1, op0);
3750       if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
3751 	{
3752 	  error ("mask must be an immediate");
3753 	  return const0_rtx;
3754 	}
3755       selector = INTVAL (op1);
3756       if (selector < 0 || selector > 255)
3757 	error ("the range of mask should be in 0 to 255");
3758       if (target == 0
3759 	  || GET_MODE (target) != tmode
3760 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3761 	target = gen_reg_rtx (tmode);
3762       pat = GEN_FCN (icode) (target, op0, op1);
3763       if (! pat)
3764 	return 0;
3765       emit_insn (pat);
3766       return target;
3767 
3768     case ARM_BUILTIN_WMADDS:
3769       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds, exp, target);
3770     case ARM_BUILTIN_WMADDSX:
3771       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx, exp, target);
3772     case ARM_BUILTIN_WMADDSN:
3773       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn, exp, target);
3774     case ARM_BUILTIN_WMADDU:
3775       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu, exp, target);
3776     case ARM_BUILTIN_WMADDUX:
3777       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux, exp, target);
3778     case ARM_BUILTIN_WMADDUN:
3779       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun, exp, target);
3780     case ARM_BUILTIN_WSADBZ:
3781       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, exp, target);
3782     case ARM_BUILTIN_WSADHZ:
3783       return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, exp, target);
3784 
3785       /* Several three-argument builtins.  */
3786     case ARM_BUILTIN_WMACS:
3787     case ARM_BUILTIN_WMACU:
3788     case ARM_BUILTIN_TMIA:
3789     case ARM_BUILTIN_TMIAPH:
3790     case ARM_BUILTIN_TMIATT:
3791     case ARM_BUILTIN_TMIATB:
3792     case ARM_BUILTIN_TMIABT:
3793     case ARM_BUILTIN_TMIABB:
3794     case ARM_BUILTIN_WQMIABB:
3795     case ARM_BUILTIN_WQMIABT:
3796     case ARM_BUILTIN_WQMIATB:
3797     case ARM_BUILTIN_WQMIATT:
3798     case ARM_BUILTIN_WQMIABBN:
3799     case ARM_BUILTIN_WQMIABTN:
3800     case ARM_BUILTIN_WQMIATBN:
3801     case ARM_BUILTIN_WQMIATTN:
3802     case ARM_BUILTIN_WMIABB:
3803     case ARM_BUILTIN_WMIABT:
3804     case ARM_BUILTIN_WMIATB:
3805     case ARM_BUILTIN_WMIATT:
3806     case ARM_BUILTIN_WMIABBN:
3807     case ARM_BUILTIN_WMIABTN:
3808     case ARM_BUILTIN_WMIATBN:
3809     case ARM_BUILTIN_WMIATTN:
3810     case ARM_BUILTIN_WMIAWBB:
3811     case ARM_BUILTIN_WMIAWBT:
3812     case ARM_BUILTIN_WMIAWTB:
3813     case ARM_BUILTIN_WMIAWTT:
3814     case ARM_BUILTIN_WMIAWBBN:
3815     case ARM_BUILTIN_WMIAWBTN:
3816     case ARM_BUILTIN_WMIAWTBN:
3817     case ARM_BUILTIN_WMIAWTTN:
3818     case ARM_BUILTIN_WSADB:
3819     case ARM_BUILTIN_WSADH:
3820       icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
3821 	       : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
3822 	       : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
3823 	       : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
3824 	       : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
3825 	       : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
3826 	       : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
3827 	       : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
3828 	       : fcode == ARM_BUILTIN_WQMIABB ? CODE_FOR_iwmmxt_wqmiabb
3829 	       : fcode == ARM_BUILTIN_WQMIABT ? CODE_FOR_iwmmxt_wqmiabt
3830 	       : fcode == ARM_BUILTIN_WQMIATB ? CODE_FOR_iwmmxt_wqmiatb
3831 	       : fcode == ARM_BUILTIN_WQMIATT ? CODE_FOR_iwmmxt_wqmiatt
3832 	       : fcode == ARM_BUILTIN_WQMIABBN ? CODE_FOR_iwmmxt_wqmiabbn
3833 	       : fcode == ARM_BUILTIN_WQMIABTN ? CODE_FOR_iwmmxt_wqmiabtn
3834 	       : fcode == ARM_BUILTIN_WQMIATBN ? CODE_FOR_iwmmxt_wqmiatbn
3835 	       : fcode == ARM_BUILTIN_WQMIATTN ? CODE_FOR_iwmmxt_wqmiattn
3836 	       : fcode == ARM_BUILTIN_WMIABB ? CODE_FOR_iwmmxt_wmiabb
3837 	       : fcode == ARM_BUILTIN_WMIABT ? CODE_FOR_iwmmxt_wmiabt
3838 	       : fcode == ARM_BUILTIN_WMIATB ? CODE_FOR_iwmmxt_wmiatb
3839 	       : fcode == ARM_BUILTIN_WMIATT ? CODE_FOR_iwmmxt_wmiatt
3840 	       : fcode == ARM_BUILTIN_WMIABBN ? CODE_FOR_iwmmxt_wmiabbn
3841 	       : fcode == ARM_BUILTIN_WMIABTN ? CODE_FOR_iwmmxt_wmiabtn
3842 	       : fcode == ARM_BUILTIN_WMIATBN ? CODE_FOR_iwmmxt_wmiatbn
3843 	       : fcode == ARM_BUILTIN_WMIATTN ? CODE_FOR_iwmmxt_wmiattn
3844 	       : fcode == ARM_BUILTIN_WMIAWBB ? CODE_FOR_iwmmxt_wmiawbb
3845 	       : fcode == ARM_BUILTIN_WMIAWBT ? CODE_FOR_iwmmxt_wmiawbt
3846 	       : fcode == ARM_BUILTIN_WMIAWTB ? CODE_FOR_iwmmxt_wmiawtb
3847 	       : fcode == ARM_BUILTIN_WMIAWTT ? CODE_FOR_iwmmxt_wmiawtt
3848 	       : fcode == ARM_BUILTIN_WMIAWBBN ? CODE_FOR_iwmmxt_wmiawbbn
3849 	       : fcode == ARM_BUILTIN_WMIAWBTN ? CODE_FOR_iwmmxt_wmiawbtn
3850 	       : fcode == ARM_BUILTIN_WMIAWTBN ? CODE_FOR_iwmmxt_wmiawtbn
3851 	       : fcode == ARM_BUILTIN_WMIAWTTN ? CODE_FOR_iwmmxt_wmiawttn
3852 	       : fcode == ARM_BUILTIN_WSADB ? CODE_FOR_iwmmxt_wsadb
3853 	       : CODE_FOR_iwmmxt_wsadh);
3854       arg0 = CALL_EXPR_ARG (exp, 0);
3855       arg1 = CALL_EXPR_ARG (exp, 1);
3856       arg2 = CALL_EXPR_ARG (exp, 2);
3857       op0 = expand_normal (arg0);
3858       op1 = expand_normal (arg1);
3859       op2 = expand_normal (arg2);
3860       tmode = insn_data[icode].operand[0].mode;
3861       mode0 = insn_data[icode].operand[1].mode;
3862       mode1 = insn_data[icode].operand[2].mode;
3863       mode2 = insn_data[icode].operand[3].mode;
3864 
3865       if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3866 	op0 = copy_to_mode_reg (mode0, op0);
3867       if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3868 	op1 = copy_to_mode_reg (mode1, op1);
3869       if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3870 	op2 = copy_to_mode_reg (mode2, op2);
3871       if (target == 0
3872 	  || GET_MODE (target) != tmode
3873 	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3874 	target = gen_reg_rtx (tmode);
3875       pat = GEN_FCN (icode) (target, op0, op1, op2);
3876       if (! pat)
3877 	return 0;
3878       emit_insn (pat);
3879       return target;
3880 
3881     case ARM_BUILTIN_WZERO:
3882       target = gen_reg_rtx (DImode);
3883       emit_insn (gen_iwmmxt_clrdi (target));
3884       return target;
3885 
3886     case ARM_BUILTIN_WSRLHI:
3887     case ARM_BUILTIN_WSRLWI:
3888     case ARM_BUILTIN_WSRLDI:
3889     case ARM_BUILTIN_WSLLHI:
3890     case ARM_BUILTIN_WSLLWI:
3891     case ARM_BUILTIN_WSLLDI:
3892     case ARM_BUILTIN_WSRAHI:
3893     case ARM_BUILTIN_WSRAWI:
3894     case ARM_BUILTIN_WSRADI:
3895     case ARM_BUILTIN_WRORHI:
3896     case ARM_BUILTIN_WRORWI:
3897     case ARM_BUILTIN_WRORDI:
3898     case ARM_BUILTIN_WSRLH:
3899     case ARM_BUILTIN_WSRLW:
3900     case ARM_BUILTIN_WSRLD:
3901     case ARM_BUILTIN_WSLLH:
3902     case ARM_BUILTIN_WSLLW:
3903     case ARM_BUILTIN_WSLLD:
3904     case ARM_BUILTIN_WSRAH:
3905     case ARM_BUILTIN_WSRAW:
3906     case ARM_BUILTIN_WSRAD:
3907     case ARM_BUILTIN_WRORH:
3908     case ARM_BUILTIN_WRORW:
3909     case ARM_BUILTIN_WRORD:
3910       icode = (fcode == ARM_BUILTIN_WSRLHI ? CODE_FOR_lshrv4hi3_iwmmxt
3911 	       : fcode == ARM_BUILTIN_WSRLWI ? CODE_FOR_lshrv2si3_iwmmxt
3912 	       : fcode == ARM_BUILTIN_WSRLDI ? CODE_FOR_lshrdi3_iwmmxt
3913 	       : fcode == ARM_BUILTIN_WSLLHI ? CODE_FOR_ashlv4hi3_iwmmxt
3914 	       : fcode == ARM_BUILTIN_WSLLWI ? CODE_FOR_ashlv2si3_iwmmxt
3915 	       : fcode == ARM_BUILTIN_WSLLDI ? CODE_FOR_ashldi3_iwmmxt
3916 	       : fcode == ARM_BUILTIN_WSRAHI ? CODE_FOR_ashrv4hi3_iwmmxt
3917 	       : fcode == ARM_BUILTIN_WSRAWI ? CODE_FOR_ashrv2si3_iwmmxt
3918 	       : fcode == ARM_BUILTIN_WSRADI ? CODE_FOR_ashrdi3_iwmmxt
3919 	       : fcode == ARM_BUILTIN_WRORHI ? CODE_FOR_rorv4hi3
3920 	       : fcode == ARM_BUILTIN_WRORWI ? CODE_FOR_rorv2si3
3921 	       : fcode == ARM_BUILTIN_WRORDI ? CODE_FOR_rordi3
3922 	       : fcode == ARM_BUILTIN_WSRLH  ? CODE_FOR_lshrv4hi3_di
3923 	       : fcode == ARM_BUILTIN_WSRLW  ? CODE_FOR_lshrv2si3_di
3924 	       : fcode == ARM_BUILTIN_WSRLD  ? CODE_FOR_lshrdi3_di
3925 	       : fcode == ARM_BUILTIN_WSLLH  ? CODE_FOR_ashlv4hi3_di
3926 	       : fcode == ARM_BUILTIN_WSLLW  ? CODE_FOR_ashlv2si3_di
3927 	       : fcode == ARM_BUILTIN_WSLLD  ? CODE_FOR_ashldi3_di
3928 	       : fcode == ARM_BUILTIN_WSRAH  ? CODE_FOR_ashrv4hi3_di
3929 	       : fcode == ARM_BUILTIN_WSRAW  ? CODE_FOR_ashrv2si3_di
3930 	       : fcode == ARM_BUILTIN_WSRAD  ? CODE_FOR_ashrdi3_di
3931 	       : fcode == ARM_BUILTIN_WRORH  ? CODE_FOR_rorv4hi3_di
3932 	       : fcode == ARM_BUILTIN_WRORW  ? CODE_FOR_rorv2si3_di
3933 	       : fcode == ARM_BUILTIN_WRORD  ? CODE_FOR_rordi3_di
3934 	       : CODE_FOR_nothing);
3935       arg1 = CALL_EXPR_ARG (exp, 1);
3936       op1 = expand_normal (arg1);
3937       if (GET_MODE (op1) == VOIDmode)
3938 	{
3939 	  imm = INTVAL (op1);
3940 	  if ((fcode == ARM_BUILTIN_WRORHI || fcode == ARM_BUILTIN_WRORWI
3941 	       || fcode == ARM_BUILTIN_WRORH || fcode == ARM_BUILTIN_WRORW)
3942 	      && (imm < 0 || imm > 32))
3943 	    {
3944 	      if (fcode == ARM_BUILTIN_WRORHI)
3945 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_rori_pi16 in code.");
3946 	      else if (fcode == ARM_BUILTIN_WRORWI)
3947 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_rori_pi32 in code.");
3948 	      else if (fcode == ARM_BUILTIN_WRORH)
3949 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_ror_pi16 in code.");
3950 	      else
3951 		error ("the range of count should be in 0 to 32.  please check the intrinsic _mm_ror_pi32 in code.");
3952 	    }
3953 	  else if ((fcode == ARM_BUILTIN_WRORDI || fcode == ARM_BUILTIN_WRORD)
3954 		   && (imm < 0 || imm > 64))
3955 	    {
3956 	      if (fcode == ARM_BUILTIN_WRORDI)
3957 		error ("the range of count should be in 0 to 64.  please check the intrinsic _mm_rori_si64 in code.");
3958 	      else
3959 		error ("the range of count should be in 0 to 64.  please check the intrinsic _mm_ror_si64 in code.");
3960 	    }
3961 	  else if (imm < 0)
3962 	    {
3963 	      if (fcode == ARM_BUILTIN_WSRLHI)
3964 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_pi16 in code.");
3965 	      else if (fcode == ARM_BUILTIN_WSRLWI)
3966 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_pi32 in code.");
3967 	      else if (fcode == ARM_BUILTIN_WSRLDI)
3968 		error ("the count should be no less than 0.  please check the intrinsic _mm_srli_si64 in code.");
3969 	      else if (fcode == ARM_BUILTIN_WSLLHI)
3970 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_pi16 in code.");
3971 	      else if (fcode == ARM_BUILTIN_WSLLWI)
3972 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_pi32 in code.");
3973 	      else if (fcode == ARM_BUILTIN_WSLLDI)
3974 		error ("the count should be no less than 0.  please check the intrinsic _mm_slli_si64 in code.");
3975 	      else if (fcode == ARM_BUILTIN_WSRAHI)
3976 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_pi16 in code.");
3977 	      else if (fcode == ARM_BUILTIN_WSRAWI)
3978 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_pi32 in code.");
3979 	      else if (fcode == ARM_BUILTIN_WSRADI)
3980 		error ("the count should be no less than 0.  please check the intrinsic _mm_srai_si64 in code.");
3981 	      else if (fcode == ARM_BUILTIN_WSRLH)
3982 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_pi16 in code.");
3983 	      else if (fcode == ARM_BUILTIN_WSRLW)
3984 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_pi32 in code.");
3985 	      else if (fcode == ARM_BUILTIN_WSRLD)
3986 		error ("the count should be no less than 0.  please check the intrinsic _mm_srl_si64 in code.");
3987 	      else if (fcode == ARM_BUILTIN_WSLLH)
3988 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_pi16 in code.");
3989 	      else if (fcode == ARM_BUILTIN_WSLLW)
3990 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_pi32 in code.");
3991 	      else if (fcode == ARM_BUILTIN_WSLLD)
3992 		error ("the count should be no less than 0.  please check the intrinsic _mm_sll_si64 in code.");
3993 	      else if (fcode == ARM_BUILTIN_WSRAH)
3994 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_pi16 in code.");
3995 	      else if (fcode == ARM_BUILTIN_WSRAW)
3996 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_pi32 in code.");
3997 	      else
3998 		error ("the count should be no less than 0.  please check the intrinsic _mm_sra_si64 in code.");
3999 	    }
4000 	}
4001       return arm_expand_binop_builtin (icode, exp, target);
4002 
4003     default:
4004       break;
4005     }
4006 
4007   for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4008     if (d->code == (enum arm_builtins) fcode)
4009       return arm_expand_binop_builtin (d->icode, exp, target);
4010 
4011   for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4012     if (d->code == (enum arm_builtins) fcode)
4013       return arm_expand_unop_builtin (d->icode, exp, target, 0);
4014 
4015   for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4016     if (d->code == (enum arm_builtins) fcode)
4017       return arm_expand_ternop_builtin (d->icode, exp, target);
4018 
4019   /* @@@ Should really do something sensible here.  */
4020   return NULL_RTX;
4021 }
4022 
4023 tree
4024 arm_builtin_vectorized_function (unsigned int fn, tree type_out, tree type_in)
4025 {
4026   machine_mode in_mode, out_mode;
4027   int in_n, out_n;
4028   bool out_unsigned_p = TYPE_UNSIGNED (type_out);
4029 
4030   /* Can't provide any vectorized builtins when we can't use NEON.  */
4031   if (!TARGET_NEON)
4032     return NULL_TREE;
4033 
4034   if (TREE_CODE (type_out) != VECTOR_TYPE
4035       || TREE_CODE (type_in) != VECTOR_TYPE)
4036     return NULL_TREE;
4037 
4038   out_mode = TYPE_MODE (TREE_TYPE (type_out));
4039   out_n = TYPE_VECTOR_SUBPARTS (type_out);
4040   in_mode = TYPE_MODE (TREE_TYPE (type_in));
4041   in_n = TYPE_VECTOR_SUBPARTS (type_in);
4042 
4043 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
4044    decl of the vectorized builtin for the appropriate vector mode.
4045    NULL_TREE is returned if no such builtin is available.  */
4046 #undef ARM_CHECK_BUILTIN_MODE
4047 #define ARM_CHECK_BUILTIN_MODE(C)    \
4048   (TARGET_VFP5   \
4049    && flag_unsafe_math_optimizations \
4050    && ARM_CHECK_BUILTIN_MODE_1 (C))
4051 
4052 #undef ARM_CHECK_BUILTIN_MODE_1
4053 #define ARM_CHECK_BUILTIN_MODE_1(C) \
4054   (out_mode == SFmode && out_n == C \
4055    && in_mode == SFmode && in_n == C)
4056 
4057 #undef ARM_FIND_VRINT_VARIANT
4058 #define ARM_FIND_VRINT_VARIANT(N) \
4059   (ARM_CHECK_BUILTIN_MODE (2) \
4060     ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
4061     : (ARM_CHECK_BUILTIN_MODE (4) \
4062       ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
4063       : NULL_TREE))
4064 
4065   switch (fn)
4066     {
4067     CASE_CFN_FLOOR:
4068       return ARM_FIND_VRINT_VARIANT (vrintm);
4069     CASE_CFN_CEIL:
4070       return ARM_FIND_VRINT_VARIANT (vrintp);
4071     CASE_CFN_TRUNC:
4072       return ARM_FIND_VRINT_VARIANT (vrintz);
4073     CASE_CFN_ROUND:
4074       return ARM_FIND_VRINT_VARIANT (vrinta);
4075 #undef ARM_CHECK_BUILTIN_MODE_1
4076 #define ARM_CHECK_BUILTIN_MODE_1(C) \
4077   (out_mode == SImode && out_n == C \
4078    && in_mode == SFmode && in_n == C)
4079 
4080 #define ARM_FIND_VCVT_VARIANT(N) \
4081   (ARM_CHECK_BUILTIN_MODE (2) \
4082    ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
4083    : (ARM_CHECK_BUILTIN_MODE (4) \
4084      ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
4085      : NULL_TREE))
4086 
4087 #define ARM_FIND_VCVTU_VARIANT(N) \
4088   (ARM_CHECK_BUILTIN_MODE (2) \
4089    ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
4090    : (ARM_CHECK_BUILTIN_MODE (4) \
4091      ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
4092      : NULL_TREE))
4093     CASE_CFN_LROUND:
4094       return (out_unsigned_p
4095 	      ? ARM_FIND_VCVTU_VARIANT (vcvta)
4096 	      : ARM_FIND_VCVT_VARIANT (vcvta));
4097     CASE_CFN_LCEIL:
4098       return (out_unsigned_p
4099 	      ? ARM_FIND_VCVTU_VARIANT (vcvtp)
4100 	      : ARM_FIND_VCVT_VARIANT (vcvtp));
4101     CASE_CFN_LFLOOR:
4102       return (out_unsigned_p
4103 	      ? ARM_FIND_VCVTU_VARIANT (vcvtm)
4104 	      : ARM_FIND_VCVT_VARIANT (vcvtm));
4105 #undef ARM_CHECK_BUILTIN_MODE
4106 #define ARM_CHECK_BUILTIN_MODE(C, N) \
4107   (out_mode == N##mode && out_n == C \
4108    && in_mode == N##mode && in_n == C)
4109     case CFN_BUILT_IN_BSWAP16:
4110       if (ARM_CHECK_BUILTIN_MODE (4, HI))
4111 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi, false);
4112       else if (ARM_CHECK_BUILTIN_MODE (8, HI))
4113 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi, false);
4114       else
4115 	return NULL_TREE;
4116     case CFN_BUILT_IN_BSWAP32:
4117       if (ARM_CHECK_BUILTIN_MODE (2, SI))
4118 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si, false);
4119       else if (ARM_CHECK_BUILTIN_MODE (4, SI))
4120 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si, false);
4121       else
4122 	return NULL_TREE;
4123     case CFN_BUILT_IN_BSWAP64:
4124       if (ARM_CHECK_BUILTIN_MODE (2, DI))
4125 	return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di, false);
4126       else
4127 	return NULL_TREE;
4128     CASE_CFN_COPYSIGN:
4129       if (ARM_CHECK_BUILTIN_MODE (2, SF))
4130 	return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf, false);
4131       else if (ARM_CHECK_BUILTIN_MODE (4, SF))
4132 	return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf, false);
4133       else
4134 	return NULL_TREE;
4135 
4136     default:
4137       return NULL_TREE;
4138     }
4139   return NULL_TREE;
4140 }
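
/* For example, following the macros above: with
   -funsafe-math-optimizations on a TARGET_VFP5 target, vectorizing floorf
   over a vector of four SFmode elements resolves CASE_CFN_FLOOR through
   ARM_FIND_VRINT_VARIANT (vrintm) to the ARM_BUILTIN_NEON_vrintmv4sf decl,
   while any unsupported combination yields NULL_TREE and the call remains
   scalar.  */
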
4141 #undef ARM_FIND_VCVT_VARIANT
4142 #undef ARM_FIND_VCVTU_VARIANT
4143 #undef ARM_CHECK_BUILTIN_MODE
4144 #undef ARM_FIND_VRINT_VARIANT
4145 
4146 void
4147 arm_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
4148 {
4149   const unsigned ARM_FE_INVALID = 1;
4150   const unsigned ARM_FE_DIVBYZERO = 2;
4151   const unsigned ARM_FE_OVERFLOW = 4;
4152   const unsigned ARM_FE_UNDERFLOW = 8;
4153   const unsigned ARM_FE_INEXACT = 16;
4154   const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT = (ARM_FE_INVALID
4155 						    | ARM_FE_DIVBYZERO
4156 						    | ARM_FE_OVERFLOW
4157 						    | ARM_FE_UNDERFLOW
4158 						    | ARM_FE_INEXACT);
4159   const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT = 8;
4160   tree fenv_var, get_fpscr, set_fpscr, mask, ld_fenv, masked_fenv;
4161   tree new_fenv_var, reload_fenv, restore_fnenv;
4162   tree update_call, atomic_feraiseexcept, hold_fnclex;
4163 
4164   if (!TARGET_HARD_FLOAT)
4165     return;
4166 
4167   /* Generate the equivalent of:
4168        unsigned int fenv_var;
4169        fenv_var = __builtin_arm_get_fpscr ();
4170 
4171        unsigned int masked_fenv;
4172        masked_fenv = fenv_var & mask;
4173 
4174        __builtin_arm_set_fpscr (masked_fenv);  */
4175 
4176   fenv_var = create_tmp_var_raw (unsigned_type_node);
4177   get_fpscr = arm_builtin_decls[ARM_BUILTIN_GET_FPSCR];
4178   set_fpscr = arm_builtin_decls[ARM_BUILTIN_SET_FPSCR];
4179   mask = build_int_cst (unsigned_type_node,
4180 			~((ARM_FE_ALL_EXCEPT << ARM_FE_EXCEPT_SHIFT)
4181 			  | ARM_FE_ALL_EXCEPT));
4182   ld_fenv = build4 (TARGET_EXPR, unsigned_type_node,
4183 		    fenv_var, build_call_expr (get_fpscr, 0),
4184 		    NULL_TREE, NULL_TREE);
4185   masked_fenv = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var, mask);
4186   hold_fnclex = build_call_expr (set_fpscr, 1, masked_fenv);
4187   *hold = build2 (COMPOUND_EXPR, void_type_node,
4188 		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
4189 		  hold_fnclex);
4190 
4191   /* Store the value of masked_fenv to clear the exceptions:
4192      __builtin_arm_set_fpscr (masked_fenv);  */
4193 
4194   *clear = build_call_expr (set_fpscr, 1, masked_fenv);
4195 
4196   /* Generate the equivalent of:
4197        unsigned int new_fenv_var;
4198        new_fenv_var = __builtin_arm_get_fpscr ();
4199 
4200        __builtin_arm_set_fpscr (fenv_var);
4201 
4202        __atomic_feraiseexcept (new_fenv_var);  */
4203 
4204   new_fenv_var = create_tmp_var_raw (unsigned_type_node);
4205   reload_fenv = build4 (TARGET_EXPR, unsigned_type_node, new_fenv_var,
4206 			build_call_expr (get_fpscr, 0), NULL_TREE, NULL_TREE);
4207   restore_fnenv = build_call_expr (set_fpscr, 1, fenv_var);
4208   atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
4209   update_call = build_call_expr (atomic_feraiseexcept, 1,
4210 				 fold_convert (integer_type_node, new_fenv_var));
4211   *update = build2 (COMPOUND_EXPR, void_type_node,
4212 		    build2 (COMPOUND_EXPR, void_type_node,
4213 			    reload_fenv, restore_fnenv), update_call);
4214 }
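
/* These three sequences implement the TARGET_ATOMIC_ASSIGN_EXPAND_FENV
   contract around a C11 atomic compound assignment: *HOLD acts like
   feholdexcept (save the FPSCR and clear its exception flags and
   trap-enable bits), *CLEAR like feclearexcept (clear the flags again if
   the compare-exchange loop has to retry) and *UPDATE like feupdateenv
   (restore the saved FPSCR and re-raise, via __atomic_feraiseexcept, the
   exceptions the operation produced).  */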
4215 
4216 /* Implement TARGET_CHECK_BUILTIN_CALL.  Record reads of the Q or GE bits
4217    through intrinsics by adding an attribute to the calling function.  */
4218 bool
4219 arm_check_builtin_call (location_t , vec<location_t> , tree fndecl,
4220 			tree, unsigned int, tree *)
4221 {
4222   int fcode = DECL_MD_FUNCTION_CODE (fndecl);
4223   if (fcode == ARM_BUILTIN_saturation_occurred
4224       || fcode == ARM_BUILTIN_set_saturation)
4225     {
4226       if (cfun && cfun->decl)
4227 	DECL_ATTRIBUTES (cfun->decl)
4228 	  = tree_cons (get_identifier ("acle qbit"), NULL_TREE,
4229 		       DECL_ATTRIBUTES (cfun->decl));
4230     }
4231   if (fcode == ARM_BUILTIN_sel)
4232     {
4233       if (cfun && cfun->decl)
4234 	DECL_ATTRIBUTES (cfun->decl)
4235 	  = tree_cons (get_identifier ("acle gebits"), NULL_TREE,
4236 		       DECL_ATTRIBUTES (cfun->decl));
4237     }
4238   return true;
4239 }
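
/* For instance, a function calling the ACLE saturation intrinsics (which
   map to ARM_BUILTIN_saturation_occurred / ARM_BUILTIN_set_saturation)
   acquires the "acle qbit" attribute, and one calling the SIMD selection
   intrinsic behind ARM_BUILTIN_sel acquires "acle gebits", so the rest of
   the backend knows the Q or GE bits of the APSR are read in that
   function.  (The source-level intrinsic names are assumed, not taken
   from this file.)  */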
4240 
4241 enum resolver_ident
4242 arm_describe_resolver (tree fndecl)
4243 {
4244   if (DECL_MD_FUNCTION_CODE (fndecl) >= ARM_BUILTIN_vcx1qv16qi
4245     && DECL_MD_FUNCTION_CODE (fndecl) < ARM_BUILTIN_MVE_BASE)
4246     return arm_cde_resolver;
4247   return arm_no_resolver;
4248 }
4249 
4250 unsigned
4251 arm_cde_end_args (tree fndecl)
4252 {
4253   return DECL_MD_FUNCTION_CODE (fndecl) >= ARM_BUILTIN_vcx1q_p_v16qi ? 2 : 1;
4254 }
4255 
4256 #include "gt-arm-builtins.h"
4257