1 /* Description of builtins used by the ARM backend.
2 Copyright (C) 2014-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "function.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple-expr.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "profile-count.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "langhooks.h"
42 #include "case-cfn-macros.h"
43 #include "sbitmap.h"
44 #include "stringpool.h"
45 #include "arm-builtins.h"
46 #include "stringpool.h"
47 #include "attribs.h"
48
/* Maximum number of slots in a qualifier list below: one slot for the
   return type plus up to six argument slots.  */
#define SIMD_MAX_BUILTIN_ARGS 7

/* The qualifier_internal allows generation of a unary builtin from
   a pattern with a third pseudo-operand such as a match_scratch.
   T (T).  */
static enum arm_type_qualifiers
arm_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_internal };
#define UNOP_QUALIFIERS (arm_unop_qualifiers)

/* unsigned T (unsigned T).  */
static enum arm_type_qualifiers
arm_bswap_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define BSWAP_QUALIFIERS (arm_bswap_qualifiers)

/* T (T, T [maybe_immediate]).  */
static enum arm_type_qualifiers
arm_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define BINOP_QUALIFIERS (arm_binop_qualifiers)

/* T (T, T, T).  */
static enum arm_type_qualifiers
arm_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TERNOP_QUALIFIERS (arm_ternop_qualifiers)

/* unsigned T (unsigned T, unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_unsigned_uternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned };
#define UTERNOP_QUALIFIERS (arm_unsigned_uternop_qualifiers)

/* T (T, unsigned T, T).  */
static enum arm_type_qualifiers
arm_usternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none };
#define USTERNOP_QUALIFIERS (arm_usternop_qualifiers)

/* T (T, immediate).  */
static enum arm_type_qualifiers
arm_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)

/* unsigned T (T, unsigned immediate).
   The return slot is qualifier_unsigned, matching the variant below.  */
static enum arm_type_qualifiers
arm_sat_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_unsigned_immediate };
#define SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
  (arm_sat_binop_imm_qualifiers)

/* unsigned T (T, unsigned immediate).  */
static enum arm_type_qualifiers
arm_unsigned_sat_binop_unsigned_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_unsigned_immediate };
#define UNSIGNED_SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
  (arm_unsigned_sat_binop_unsigned_imm_qualifiers)

/* T (T, lane index).  */
static enum arm_type_qualifiers
arm_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_lane_index };
#define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
116
/* T (T, T, T, immediate).  */
static enum arm_type_qualifiers
arm_mac_n_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_immediate };
#define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)

/* T (T, T, T, lane index).  */
static enum arm_type_qualifiers
arm_mac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)

/* T (T, T, T, lane pair index).  */
static enum arm_type_qualifiers
arm_mac_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define MAC_LANE_PAIR_QUALIFIERS (arm_mac_lane_pair_qualifiers)

/* unsigned T (unsigned T, unsigned T, unsigned T, lane index).  */
static enum arm_type_qualifiers
arm_umac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define UMAC_LANE_QUALIFIERS (arm_umac_lane_qualifiers)

/* T (T, unsigned T, T, lane index).  */
static enum arm_type_qualifiers
arm_usmac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define USMAC_LANE_QUADTUP_QUALIFIERS (arm_usmac_lane_quadtup_qualifiers)

/* T (T, T, unsigned T, lane index).  */
static enum arm_type_qualifiers
arm_sumac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define SUMAC_LANE_QUADTUP_QUALIFIERS (arm_sumac_lane_quadtup_qualifiers)

/* T (T, T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)

/* T (T, T, lane index).  */
static enum arm_type_qualifiers
arm_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define SETLANE_QUALIFIERS (arm_setlane_qualifiers)

/* T (T, T).  */
static enum arm_type_qualifiers
arm_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define COMBINE_QUALIFIERS (arm_combine_qualifiers)

/* T ([T element type] *).  */
static enum arm_type_qualifiers
arm_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define LOAD1_QUALIFIERS (arm_load1_qualifiers)

/* T ([T element type] *, T, immediate).  */
static enum arm_type_qualifiers
arm_load1_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)

/* unsigned T (unsigned T, unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_unsigned_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned };
#define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
196
/* NOTE(review): the seven groups below look like the signatures of the
   coprocessor access builtins (cdp, ldc, stc, mcr, mrc, mcrr, mrrc) —
   confirm against the builtin definitions that reference these macros.  */

/* void (unsigned immediate, unsigned immediate, unsigned immediate,
	 unsigned immediate, unsigned immediate, unsigned immediate).  */
static enum arm_type_qualifiers
arm_cdp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate,
      qualifier_unsigned_immediate,
      qualifier_unsigned_immediate,
      qualifier_unsigned_immediate,
      qualifier_unsigned_immediate };
#define CDP_QUALIFIERS \
  (arm_cdp_qualifiers)

/* void (unsigned immediate, unsigned immediate, const void *).  */
static enum arm_type_qualifiers
arm_ldc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_const_void_pointer };
#define LDC_QUALIFIERS \
  (arm_ldc_qualifiers)

/* void (unsigned immediate, unsigned immediate, void *).  */
static enum arm_type_qualifiers
arm_stc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_void_pointer };
#define STC_QUALIFIERS \
  (arm_stc_qualifiers)

/* void (unsigned immediate, unsigned immediate, T, unsigned immediate,
	 unsigned immediate, unsigned immediate).  */
static enum arm_type_qualifiers
arm_mcr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_none,
      qualifier_unsigned_immediate, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate };
#define MCR_QUALIFIERS \
  (arm_mcr_qualifiers)

/* T (unsigned immediate, unsigned immediate, unsigned immediate,
      unsigned immediate, unsigned immediate).  */
static enum arm_type_qualifiers
arm_mrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_unsigned_immediate };
#define MRC_QUALIFIERS \
  (arm_mrc_qualifiers)

/* void (unsigned immediate, unsigned immediate, T, unsigned immediate).  */
static enum arm_type_qualifiers
arm_mcrr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_none,
      qualifier_unsigned_immediate };
#define MCRR_QUALIFIERS \
  (arm_mcrr_qualifiers)

/* T (unsigned immediate, unsigned immediate, unsigned immediate).  */
static enum arm_type_qualifiers
arm_mrrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned_immediate,
      qualifier_unsigned_immediate, qualifier_unsigned_immediate };
#define MRRC_QUALIFIERS \
  (arm_mrrc_qualifiers)
263
/* T (immediate, unsigned immediate).  */
static enum arm_type_qualifiers
arm_cx_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate, qualifier_unsigned_immediate };
#define CX_IMM_QUALIFIERS (arm_cx_imm_qualifiers)

/* T (immediate, T, unsigned immediate).  */
static enum arm_type_qualifiers
arm_cx_unary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate, qualifier_none,
      qualifier_unsigned_immediate };
#define CX_UNARY_QUALIFIERS (arm_cx_unary_qualifiers)

/* T (immediate, T, T, unsigned immediate).  */
static enum arm_type_qualifiers
arm_cx_binary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate,
      qualifier_none, qualifier_none,
      qualifier_unsigned_immediate };
#define CX_BINARY_QUALIFIERS (arm_cx_binary_qualifiers)

/* T (immediate, T, T, T, unsigned immediate).  */
static enum arm_type_qualifiers
arm_cx_ternary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate,
      qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned_immediate };
#define CX_TERNARY_QUALIFIERS (arm_cx_ternary_qualifiers)

/* T (immediate, T, unsigned immediate, predicate).  */
static enum arm_type_qualifiers
arm_cx_unary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate, qualifier_none,
      qualifier_unsigned_immediate,
      qualifier_predicate };
#define CX_UNARY_UNONE_QUALIFIERS (arm_cx_unary_unone_qualifiers)

/* T (immediate, T, T, unsigned immediate, predicate).  */
static enum arm_type_qualifiers
arm_cx_binary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate,
      qualifier_none, qualifier_none,
      qualifier_unsigned_immediate,
      qualifier_predicate };
#define CX_BINARY_UNONE_QUALIFIERS (arm_cx_binary_unone_qualifiers)

/* T (immediate, T, T, T, unsigned immediate, predicate).  */
static enum arm_type_qualifiers
arm_cx_ternary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate,
      qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned_immediate,
      qualifier_predicate };
#define CX_TERNARY_UNONE_QUALIFIERS (arm_cx_ternary_unone_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.

   void ([T element type] *, T).  */
static enum arm_type_qualifiers
arm_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define STORE1_QUALIFIERS (arm_store1_qualifiers)
330
/* Qualifiers for MVE builtins.  */

/* T (T).  */
static enum arm_type_qualifiers
arm_unop_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define UNOP_NONE_NONE_QUALIFIERS \
  (arm_unop_none_none_qualifiers)

/* T (T).  */
static enum arm_type_qualifiers
arm_unop_none_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define UNOP_NONE_SNONE_QUALIFIERS \
  (arm_unop_none_snone_qualifiers)

/* T (unsigned T).  */
static enum arm_type_qualifiers
arm_unop_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned };
#define UNOP_NONE_UNONE_QUALIFIERS \
  (arm_unop_none_unone_qualifiers)

/* T (T).  */
static enum arm_type_qualifiers
arm_unop_snone_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define UNOP_SNONE_SNONE_QUALIFIERS \
  (arm_unop_snone_snone_qualifiers)

/* T (T).  */
static enum arm_type_qualifiers
arm_unop_snone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define UNOP_SNONE_NONE_QUALIFIERS \
  (arm_unop_snone_none_qualifiers)

/* T (immediate).  */
static enum arm_type_qualifiers
arm_unop_snone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_immediate };
#define UNOP_SNONE_IMM_QUALIFIERS \
  (arm_unop_snone_imm_qualifiers)

/* unsigned T (T).  */
static enum arm_type_qualifiers
arm_unop_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define UNOP_UNONE_NONE_QUALIFIERS \
  (arm_unop_unone_none_qualifiers)

/* unsigned T (unsigned T).  */
static enum arm_type_qualifiers
arm_unop_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define UNOP_UNONE_UNONE_QUALIFIERS \
  (arm_unop_unone_unone_qualifiers)

/* unsigned T (immediate).  */
static enum arm_type_qualifiers
arm_unop_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_immediate };
#define UNOP_UNONE_IMM_QUALIFIERS \
  (arm_unop_unone_imm_qualifiers)

/* T (T, T).  */
static enum arm_type_qualifiers
arm_binop_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define BINOP_NONE_NONE_NONE_QUALIFIERS \
  (arm_binop_none_none_none_qualifiers)

/* T (T, immediate).  */
static enum arm_type_qualifiers
arm_binop_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define BINOP_NONE_NONE_IMM_QUALIFIERS \
  (arm_binop_none_none_imm_qualifiers)

/* T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_binop_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define BINOP_NONE_UNONE_IMM_QUALIFIERS \
  (arm_binop_none_unone_imm_qualifiers)

/* T (unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_binop_none_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_unsigned };
#define BINOP_NONE_UNONE_UNONE_QUALIFIERS \
  (arm_binop_none_unone_unone_qualifiers)

/* unsigned T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_binop_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define BINOP_UNONE_UNONE_IMM_QUALIFIERS \
  (arm_binop_unone_unone_imm_qualifiers)

/* unsigned T (unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_binop_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define BINOP_UNONE_UNONE_UNONE_QUALIFIERS \
  (arm_binop_unone_unone_unone_qualifiers)

/* predicate (unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_binop_pred_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_predicate, qualifier_unsigned, qualifier_unsigned };
#define BINOP_PRED_UNONE_UNONE_QUALIFIERS \
  (arm_binop_pred_unone_unone_qualifiers)

/* unsigned T (T, immediate).  */
static enum arm_type_qualifiers
arm_binop_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define BINOP_UNONE_NONE_IMM_QUALIFIERS \
  (arm_binop_unone_none_imm_qualifiers)

/* predicate (T, T).  */
static enum arm_type_qualifiers
arm_binop_pred_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_predicate, qualifier_none, qualifier_none };
#define BINOP_PRED_NONE_NONE_QUALIFIERS \
  (arm_binop_pred_none_none_qualifiers)

/* unsigned T (unsigned T, T).  */
static enum arm_type_qualifiers
arm_binop_unone_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define BINOP_UNONE_UNONE_NONE_QUALIFIERS \
  (arm_binop_unone_unone_none_qualifiers)
446
/* unsigned T (unsigned T, unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TERNOP_UNONE_UNONE_UNONE_IMM_QUALIFIERS \
  (arm_ternop_unone_unone_unone_imm_qualifiers)

/* unsigned T (unsigned T, T, T).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none };
#define TERNOP_UNONE_UNONE_NONE_NONE_QUALIFIERS \
  (arm_ternop_unone_unone_none_none_qualifiers)

/* unsigned T (T, unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_unone_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_unsigned,
      qualifier_immediate };
#define TERNOP_UNONE_NONE_UNONE_IMM_QUALIFIERS \
  (arm_ternop_unone_none_unone_imm_qualifiers)

/* T (T, unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_none_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TERNOP_NONE_NONE_UNONE_IMM_QUALIFIERS \
  (arm_ternop_none_none_unone_imm_qualifiers)

/* unsigned T (unsigned T, T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
      qualifier_immediate };
#define TERNOP_UNONE_UNONE_NONE_IMM_QUALIFIERS \
  (arm_ternop_unone_unone_none_imm_qualifiers)

/* unsigned T (unsigned T, T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
      qualifier_predicate };
#define TERNOP_UNONE_UNONE_NONE_PRED_QUALIFIERS \
  (arm_ternop_unone_unone_none_pred_qualifiers)

/* unsigned T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate };
#define TERNOP_UNONE_UNONE_IMM_PRED_QUALIFIERS \
  (arm_ternop_unone_unone_imm_pred_qualifiers)

/* predicate (T, T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_pred_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_predicate, qualifier_none, qualifier_none, qualifier_predicate };
#define TERNOP_PRED_NONE_NONE_PRED_QUALIFIERS \
  (arm_ternop_pred_none_none_pred_qualifiers)

/* T (T, T, immediate).  */
static enum arm_type_qualifiers
arm_ternop_none_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TERNOP_NONE_NONE_NONE_IMM_QUALIFIERS \
  (arm_ternop_none_none_none_imm_qualifiers)

/* T (T, T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_none_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_predicate };
#define TERNOP_NONE_NONE_NONE_PRED_QUALIFIERS \
  (arm_ternop_none_none_none_pred_qualifiers)

/* T (T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ternop_none_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate, qualifier_predicate };
#define TERNOP_NONE_NONE_IMM_PRED_QUALIFIERS \
  (arm_ternop_none_none_imm_pred_qualifiers)

/* T (T, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_none_none_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_predicate };
#define TERNOP_NONE_NONE_UNONE_PRED_QUALIFIERS \
  (arm_ternop_none_none_unone_pred_qualifiers)

/* unsigned T (unsigned T, unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned };
#define TERNOP_UNONE_UNONE_UNONE_UNONE_QUALIFIERS \
  (arm_ternop_unone_unone_unone_unone_qualifiers)

/* unsigned T (unsigned T, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_unone_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_predicate };
#define TERNOP_UNONE_UNONE_UNONE_PRED_QUALIFIERS \
  (arm_ternop_unone_unone_unone_pred_qualifiers)

/* predicate (unsigned T, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_ternop_pred_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_predicate, qualifier_unsigned, qualifier_unsigned,
      qualifier_predicate };
#define TERNOP_PRED_UNONE_UNONE_PRED_QUALIFIERS \
  (arm_ternop_pred_unone_unone_pred_qualifiers)

/* T (T, T, T).  */
static enum arm_type_qualifiers
arm_ternop_none_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TERNOP_NONE_NONE_NONE_NONE_QUALIFIERS \
  (arm_ternop_none_none_none_none_qualifiers)

/* unsigned T (unsigned T, T, T, predicate).  */
static enum arm_type_qualifiers
arm_quadop_unone_unone_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none,
      qualifier_predicate };
#define QUADOP_UNONE_UNONE_NONE_NONE_PRED_QUALIFIERS \
  (arm_quadop_unone_unone_none_none_pred_qualifiers)

/* T (T, T, T, predicate).  */
static enum arm_type_qualifiers
arm_quadop_none_none_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none,
      qualifier_predicate };
#define QUADOP_NONE_NONE_NONE_NONE_PRED_QUALIFIERS \
  (arm_quadop_none_none_none_none_pred_qualifiers)

/* T (T, T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_quadop_none_none_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate,
      qualifier_predicate };
#define QUADOP_NONE_NONE_NONE_IMM_PRED_QUALIFIERS \
  (arm_quadop_none_none_none_imm_pred_qualifiers)

/* unsigned T (unsigned T, unsigned T, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_quadop_unone_unone_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_predicate };
#define QUADOP_UNONE_UNONE_UNONE_UNONE_PRED_QUALIFIERS \
  (arm_quadop_unone_unone_unone_unone_pred_qualifiers)

/* unsigned T (unsigned T, T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_quadop_unone_unone_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
      qualifier_immediate, qualifier_predicate };
#define QUADOP_UNONE_UNONE_NONE_IMM_PRED_QUALIFIERS \
  (arm_quadop_unone_unone_none_imm_pred_qualifiers)

/* T (T, unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_quadop_none_none_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate };
#define QUADOP_NONE_NONE_UNONE_IMM_PRED_QUALIFIERS \
  (arm_quadop_none_none_unone_imm_pred_qualifiers)

/* unsigned T (unsigned T, unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_quadop_unone_unone_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate, qualifier_predicate };
#define QUADOP_UNONE_UNONE_UNONE_IMM_PRED_QUALIFIERS \
  (arm_quadop_unone_unone_unone_imm_pred_qualifiers)

/* unsigned T (unsigned T, unsigned T, T, predicate).  */
static enum arm_type_qualifiers
arm_quadop_unone_unone_unone_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_none, qualifier_predicate };
#define QUADOP_UNONE_UNONE_UNONE_NONE_PRED_QUALIFIERS \
  (arm_quadop_unone_unone_unone_none_pred_qualifiers)
606
/* void (pointer, T).  */
static enum arm_type_qualifiers
arm_strs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_none };
#define STRS_QUALIFIERS (arm_strs_qualifiers)

/* void (pointer, unsigned T).  */
static enum arm_type_qualifiers
arm_stru_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned };
#define STRU_QUALIFIERS (arm_stru_qualifiers)

/* void (pointer, unsigned T, T).  */
static enum arm_type_qualifiers
arm_strss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned,
      qualifier_none};
#define STRSS_QUALIFIERS (arm_strss_qualifiers)

/* void (pointer, unsigned T, unsigned T).  */
static enum arm_type_qualifiers
arm_strsu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned,
      qualifier_unsigned};
#define STRSU_QUALIFIERS (arm_strsu_qualifiers)

/* void (unsigned T, immediate, T).  */
static enum arm_type_qualifiers
arm_strsbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned, qualifier_immediate, qualifier_none};
#define STRSBS_QUALIFIERS (arm_strsbs_qualifiers)

/* void (unsigned T, immediate, unsigned T).  */
static enum arm_type_qualifiers
arm_strsbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned, qualifier_immediate,
      qualifier_unsigned};
#define STRSBU_QUALIFIERS (arm_strsbu_qualifiers)

/* void (pointer, T, predicate).  */
static enum arm_type_qualifiers
arm_strs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_none, qualifier_predicate};
#define STRS_P_QUALIFIERS (arm_strs_p_qualifiers)

/* void (pointer, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_stru_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned,
      qualifier_predicate};
#define STRU_P_QUALIFIERS (arm_stru_p_qualifiers)

/* void (pointer, unsigned T, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_strsu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned,
      qualifier_unsigned, qualifier_predicate};
#define STRSU_P_QUALIFIERS (arm_strsu_p_qualifiers)

/* void (pointer, unsigned T, T, predicate).  */
static enum arm_type_qualifiers
arm_strss_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer, qualifier_unsigned,
      qualifier_none, qualifier_predicate};
#define STRSS_P_QUALIFIERS (arm_strss_p_qualifiers)

/* void (unsigned T, immediate, T, predicate).  */
static enum arm_type_qualifiers
arm_strsbs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned, qualifier_immediate,
      qualifier_none, qualifier_predicate};
#define STRSBS_P_QUALIFIERS (arm_strsbs_p_qualifiers)

/* void (unsigned T, immediate, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_strsbu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_unsigned, qualifier_immediate,
      qualifier_unsigned, qualifier_predicate};
#define STRSBU_P_QUALIFIERS (arm_strsbu_p_qualifiers)

/* unsigned T (pointer, unsigned T).  */
static enum arm_type_qualifiers
arm_ldrgu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_pointer, qualifier_unsigned};
#define LDRGU_QUALIFIERS (arm_ldrgu_qualifiers)

/* T (pointer, unsigned T).  */
static enum arm_type_qualifiers
arm_ldrgs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_pointer, qualifier_unsigned};
#define LDRGS_QUALIFIERS (arm_ldrgs_qualifiers)

/* T (pointer).  */
static enum arm_type_qualifiers
arm_ldrs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_pointer};
#define LDRS_QUALIFIERS (arm_ldrs_qualifiers)

/* unsigned T (pointer).  */
static enum arm_type_qualifiers
arm_ldru_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_pointer};
#define LDRU_QUALIFIERS (arm_ldru_qualifiers)

/* T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ldrgbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate};
#define LDRGBS_QUALIFIERS (arm_ldrgbs_qualifiers)

/* unsigned T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ldrgbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
#define LDRGBU_QUALIFIERS (arm_ldrgbu_qualifiers)

/* T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ldrgbs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate};
#define LDRGBS_Z_QUALIFIERS (arm_ldrgbs_z_qualifiers)

/* unsigned T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ldrgbu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate};
#define LDRGBU_Z_QUALIFIERS (arm_ldrgbu_z_qualifiers)

/* T (pointer, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_ldrgs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_pointer, qualifier_unsigned,
      qualifier_predicate};
#define LDRGS_Z_QUALIFIERS (arm_ldrgs_z_qualifiers)

/* unsigned T (pointer, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_ldrgu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_pointer, qualifier_unsigned,
      qualifier_predicate};
#define LDRGU_Z_QUALIFIERS (arm_ldrgu_z_qualifiers)

/* T (pointer, predicate).  */
static enum arm_type_qualifiers
arm_ldrs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_pointer, qualifier_predicate};
#define LDRS_Z_QUALIFIERS (arm_ldrs_z_qualifiers)

/* unsigned T (pointer, predicate).  */
static enum arm_type_qualifiers
arm_ldru_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_pointer, qualifier_predicate};
#define LDRU_Z_QUALIFIERS (arm_ldru_z_qualifiers)
738
/* unsigned T (unsigned T, unsigned T, unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_quinop_unone_unone_unone_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate, qualifier_predicate };
#define QUINOP_UNONE_UNONE_UNONE_UNONE_IMM_PRED_QUALIFIERS \
  (arm_quinop_unone_unone_unone_unone_imm_pred_qualifiers)

/* unsigned T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ldrgbwbxu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
#define LDRGBWBXU_QUALIFIERS (arm_ldrgbwbxu_qualifiers)

/* unsigned T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ldrgbwbxu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate};
#define LDRGBWBXU_Z_QUALIFIERS (arm_ldrgbwbxu_z_qualifiers)

/* T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ldrgbwbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate};
#define LDRGBWBS_QUALIFIERS (arm_ldrgbwbs_qualifiers)

/* unsigned T (unsigned T, immediate).  */
static enum arm_type_qualifiers
arm_ldrgbwbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate};
#define LDRGBWBU_QUALIFIERS (arm_ldrgbwbu_qualifiers)

/* T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ldrgbwbs_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate};
#define LDRGBWBS_Z_QUALIFIERS (arm_ldrgbwbs_z_qualifiers)

/* unsigned T (unsigned T, immediate, predicate).  */
static enum arm_type_qualifiers
arm_ldrgbwbu_z_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
      qualifier_predicate};
#define LDRGBWBU_Z_QUALIFIERS (arm_ldrgbwbu_z_qualifiers)

/* unsigned T (unsigned T, const, T).  */
static enum arm_type_qualifiers
arm_strsbwbs_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_const, qualifier_none};
#define STRSBWBS_QUALIFIERS (arm_strsbwbs_qualifiers)

/* unsigned T (unsigned T, const, unsigned T).  */
static enum arm_type_qualifiers
arm_strsbwbu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_const, qualifier_unsigned};
#define STRSBWBU_QUALIFIERS (arm_strsbwbu_qualifiers)

/* unsigned T (unsigned T, const, T, predicate).  */
static enum arm_type_qualifiers
arm_strsbwbs_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_const,
      qualifier_none, qualifier_predicate};
#define STRSBWBS_P_QUALIFIERS (arm_strsbwbs_p_qualifiers)

/* unsigned T (unsigned T, const, unsigned T, predicate).  */
static enum arm_type_qualifiers
arm_strsbwbu_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_const,
      qualifier_unsigned, qualifier_predicate};
#define STRSBWBU_P_QUALIFIERS (arm_strsbwbu_p_qualifiers)

/* unsigned T (unsigned T, T).  */
static enum arm_type_qualifiers
arm_lsll_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none};
#define LSLL_QUALIFIERS (arm_lsll_qualifiers)

/* unsigned T (unsigned T, const).  */
static enum arm_type_qualifiers
arm_uqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
#define UQSHL_QUALIFIERS (arm_uqshl_qualifiers)
810
811 static enum arm_type_qualifiers
812 arm_asrl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
813 = { qualifier_none, qualifier_none, qualifier_none};
814 #define ASRL_QUALIFIERS (arm_asrl_qualifiers)
815
816 static enum arm_type_qualifiers
817 arm_sqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
818 = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
819 #define SQSHL_QUALIFIERS (arm_sqshl_qualifiers)
820
821 static enum arm_type_qualifiers
822 arm_binop_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
823 = { qualifier_none, qualifier_none, qualifier_predicate };
824 #define BINOP_NONE_NONE_PRED_QUALIFIERS \
825 (arm_binop_none_none_pred_qualifiers)
826
827 static enum arm_type_qualifiers
828 arm_binop_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
829 = { qualifier_unsigned, qualifier_unsigned, qualifier_predicate };
830 #define BINOP_UNONE_UNONE_PRED_QUALIFIERS \
831 (arm_binop_unone_unone_pred_qualifiers)
832
833 /* End of Qualifier for MVE builtins. */
834
835 /* void ([T element type] *, T, immediate). */
836 static enum arm_type_qualifiers
837 arm_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
838 = { qualifier_void, qualifier_pointer_map_mode,
839 qualifier_none, qualifier_struct_load_store_lane_index };
840 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
841
842 /* int (void). */
843 static enum arm_type_qualifiers
844 arm_sat_occurred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
845 = { qualifier_none, qualifier_void };
846 #define SAT_OCCURRED_QUALIFIERS (arm_sat_occurred_qualifiers)
847
848 /* void (int). */
849 static enum arm_type_qualifiers
850 arm_set_sat_qualifiers[SIMD_MAX_BUILTIN_ARGS]
851 = { qualifier_void, qualifier_none };
852 #define SET_SAT_QUALIFIERS (arm_set_sat_qualifiers)
853
/* The *_UP macros map the lower-case mode suffixes used in the builtin
   .def files onto the corresponding machine_mode enumerators; UP (X)
   performs the lookup by token pasting.  */
#define v8qi_UP  E_V8QImode
#define v4hi_UP  E_V4HImode
#define v4hf_UP  E_V4HFmode
#define v4bf_UP  E_V4BFmode
#define v2si_UP  E_V2SImode
#define v2sf_UP  E_V2SFmode
#define v2bf_UP  E_V2BFmode
#define di_UP    E_DImode
#define v16qi_UP E_V16QImode
#define v8hi_UP  E_V8HImode
#define v8hf_UP  E_V8HFmode
#define v8bf_UP  E_V8BFmode
#define v4si_UP  E_V4SImode
#define v4sf_UP  E_V4SFmode
#define v2di_UP  E_V2DImode
#define ti_UP    E_TImode
#define ei_UP    E_EImode
#define oi_UP    E_OImode
#define hf_UP    E_HFmode
#define bf_UP    E_BFmode
#define si_UP    E_SImode
#define hi_UP    E_HImode
#define void_UP  E_VOIDmode
#define sf_UP    E_SFmode
#define UP(X) X##_UP
879
/* Static descriptor for one builtin; the tables of these are expanded
   from the VARn macros below over the various *.def files.  */
typedef struct {
  const char *name;		/* Builtin name (minus the target prefix).  */
  machine_mode mode;		/* "Key" mode of the instruction variant.  */
  const enum insn_code code;	/* Insn pattern implementing the builtin.  */
  unsigned int fcode;		/* Builtin function code; written back by
				   arm_init_builtin at registration time.  */
  enum arm_type_qualifiers *qualifiers;	/* Return/argument qualifiers.  */
} arm_builtin_datum;
887
/* CF yields the code_for enumerator of the NEON pattern for builtin N
   with mode suffix X.  It is re-pointed at the MVE patterns further
   down before the MVE table is expanded.  */
#define CF(N,X) CODE_FOR_neon_##N##X

/* VARn (T, N, A, ...) emits one arm_builtin_datum initializer per mode
   in A..., all sharing the qualifier table T##_QUALIFIERS; each VARn
   peels off one mode and recurses via the smaller VARs down to VAR1.  */
#define VAR1(T, N, A) \
  {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
#define VAR2(T, N, A, B) \
  VAR1 (T, N, A) \
  VAR1 (T, N, B)
#define VAR3(T, N, A, B, C) \
  VAR2 (T, N, A, B) \
  VAR1 (T, N, C)
#define VAR4(T, N, A, B, C, D) \
  VAR3 (T, N, A, B, C) \
  VAR1 (T, N, D)
#define VAR5(T, N, A, B, C, D, E) \
  VAR4 (T, N, A, B, C, D) \
  VAR1 (T, N, E)
#define VAR6(T, N, A, B, C, D, E, F) \
  VAR5 (T, N, A, B, C, D, E) \
  VAR1 (T, N, F)
#define VAR7(T, N, A, B, C, D, E, F, G) \
  VAR6 (T, N, A, B, C, D, E, F) \
  VAR1 (T, N, G)
#define VAR8(T, N, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, A, B, C, D, E, F, G) \
  VAR1 (T, N, H)
#define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, I)
#define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, J)
#define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, K)
#define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, L)
#define VAR13(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, M)
/* Note: the 14th mode parameter is named O (not N) because N is already
   the builtin-name parameter.  */
#define VAR14(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M, O) \
  VAR13 (T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, N, O)
931
/* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
   and arm_acle_builtins.def.  The entries in arm_neon_builtins.def require
   TARGET_NEON to be true.  The feature tests are checked when the builtins are
   expanded.

   The mode entries in the following table correspond to the "key" type of the
   instruction variant, i.e. equivalent to that which would be specified after
   the assembler mnemonic for neon instructions, which usually refers to the
   last vector operand.  The modes listed per instruction should be the same as
   those defined for that instruction's pattern, for instance in neon.md.  */

static arm_builtin_datum vfp_builtin_data[] =
{
#include "arm_vfp_builtins.def"
};

static arm_builtin_datum neon_builtin_data[] =
{
#include "arm_neon_builtins.def"
};

/* MVE builtins map to mve_* rather than neon_* patterns, so re-point CF
   before expanding the MVE table.  */
#undef CF
#define CF(N,X) CODE_FOR_mve_##N##X
static arm_builtin_datum mve_builtin_data[] =
{
#include "arm_mve_builtins.def"
};

/* ACLE builtins use the arm_* pattern directly and keep the bare name
   (no mode suffix appended).  */
#undef CF
#undef VAR1
#define VAR1(T, N, A) \
  {#N, UP (A), CODE_FOR_arm_##N, 0, T##_QUALIFIERS},

static arm_builtin_datum acle_builtin_data[] =
{
#include "arm_acle_builtins.def"
};
969
#undef VAR1
/* IMM_MAX sets the maximum valid value of the CDE immediate operand.
   ECF_FLAG sets the flag used for set_call_expr_flags.  */
#define VAR1(T, N, A, IMM_MAX, ECF_FLAG) \
  {{#N #A, UP (A), CODE_FOR_arm_##N##A, 0, T##_QUALIFIERS}, IMM_MAX, ECF_FLAG},

/* CDE builtin descriptor: the generic datum plus the extra per-builtin
   immediate bound and call-expression flags.  */
typedef struct {
  arm_builtin_datum base;
  unsigned int imm_max;	/* Maximum valid value of the immediate operand.  */
  int ecf_flag;		/* Flag passed to set_call_expr_flags.  */
} arm_builtin_cde_datum;

static arm_builtin_cde_datum cde_builtin_data[] =
{
#include "arm_cde_builtins.def"
};
986
#undef VAR1
#define VAR1(T, N, X) \
  ARM_BUILTIN_NEON_##N##X,

/* Function codes for every builtin the ARM backend registers.  The
   *_BASE enumerators mark where each table-driven group begins; those
   groups are generated by re-expanding VAR1 (and the CRYPTO* macros)
   over the same .def files that built the data tables above, so the
   enum stays in lock-step with the tables.  */
enum arm_builtins
{
  ARM_BUILTIN_GETWCGR0,
  ARM_BUILTIN_GETWCGR1,
  ARM_BUILTIN_GETWCGR2,
  ARM_BUILTIN_GETWCGR3,

  ARM_BUILTIN_SETWCGR0,
  ARM_BUILTIN_SETWCGR1,
  ARM_BUILTIN_SETWCGR2,
  ARM_BUILTIN_SETWCGR3,

  ARM_BUILTIN_WZERO,

  ARM_BUILTIN_WAVG2BR,
  ARM_BUILTIN_WAVG2HR,
  ARM_BUILTIN_WAVG2B,
  ARM_BUILTIN_WAVG2H,

  ARM_BUILTIN_WACCB,
  ARM_BUILTIN_WACCH,
  ARM_BUILTIN_WACCW,

  ARM_BUILTIN_WMACS,
  ARM_BUILTIN_WMACSZ,
  ARM_BUILTIN_WMACU,
  ARM_BUILTIN_WMACUZ,

  ARM_BUILTIN_WSADB,
  ARM_BUILTIN_WSADBZ,
  ARM_BUILTIN_WSADH,
  ARM_BUILTIN_WSADHZ,

  ARM_BUILTIN_WALIGNI,
  ARM_BUILTIN_WALIGNR0,
  ARM_BUILTIN_WALIGNR1,
  ARM_BUILTIN_WALIGNR2,
  ARM_BUILTIN_WALIGNR3,

  ARM_BUILTIN_TMIA,
  ARM_BUILTIN_TMIAPH,
  ARM_BUILTIN_TMIABB,
  ARM_BUILTIN_TMIABT,
  ARM_BUILTIN_TMIATB,
  ARM_BUILTIN_TMIATT,

  ARM_BUILTIN_TMOVMSKB,
  ARM_BUILTIN_TMOVMSKH,
  ARM_BUILTIN_TMOVMSKW,

  ARM_BUILTIN_TBCSTB,
  ARM_BUILTIN_TBCSTH,
  ARM_BUILTIN_TBCSTW,

  ARM_BUILTIN_WMADDS,
  ARM_BUILTIN_WMADDU,

  ARM_BUILTIN_WPACKHSS,
  ARM_BUILTIN_WPACKWSS,
  ARM_BUILTIN_WPACKDSS,
  ARM_BUILTIN_WPACKHUS,
  ARM_BUILTIN_WPACKWUS,
  ARM_BUILTIN_WPACKDUS,

  ARM_BUILTIN_WADDB,
  ARM_BUILTIN_WADDH,
  ARM_BUILTIN_WADDW,
  ARM_BUILTIN_WADDSSB,
  ARM_BUILTIN_WADDSSH,
  ARM_BUILTIN_WADDSSW,
  ARM_BUILTIN_WADDUSB,
  ARM_BUILTIN_WADDUSH,
  ARM_BUILTIN_WADDUSW,
  ARM_BUILTIN_WSUBB,
  ARM_BUILTIN_WSUBH,
  ARM_BUILTIN_WSUBW,
  ARM_BUILTIN_WSUBSSB,
  ARM_BUILTIN_WSUBSSH,
  ARM_BUILTIN_WSUBSSW,
  ARM_BUILTIN_WSUBUSB,
  ARM_BUILTIN_WSUBUSH,
  ARM_BUILTIN_WSUBUSW,

  ARM_BUILTIN_WAND,
  ARM_BUILTIN_WANDN,
  ARM_BUILTIN_WOR,
  ARM_BUILTIN_WXOR,

  ARM_BUILTIN_WCMPEQB,
  ARM_BUILTIN_WCMPEQH,
  ARM_BUILTIN_WCMPEQW,
  ARM_BUILTIN_WCMPGTUB,
  ARM_BUILTIN_WCMPGTUH,
  ARM_BUILTIN_WCMPGTUW,
  ARM_BUILTIN_WCMPGTSB,
  ARM_BUILTIN_WCMPGTSH,
  ARM_BUILTIN_WCMPGTSW,

  ARM_BUILTIN_TEXTRMSB,
  ARM_BUILTIN_TEXTRMSH,
  ARM_BUILTIN_TEXTRMSW,
  ARM_BUILTIN_TEXTRMUB,
  ARM_BUILTIN_TEXTRMUH,
  ARM_BUILTIN_TEXTRMUW,
  ARM_BUILTIN_TINSRB,
  ARM_BUILTIN_TINSRH,
  ARM_BUILTIN_TINSRW,

  ARM_BUILTIN_WMAXSW,
  ARM_BUILTIN_WMAXSH,
  ARM_BUILTIN_WMAXSB,
  ARM_BUILTIN_WMAXUW,
  ARM_BUILTIN_WMAXUH,
  ARM_BUILTIN_WMAXUB,
  ARM_BUILTIN_WMINSW,
  ARM_BUILTIN_WMINSH,
  ARM_BUILTIN_WMINSB,
  ARM_BUILTIN_WMINUW,
  ARM_BUILTIN_WMINUH,
  ARM_BUILTIN_WMINUB,

  ARM_BUILTIN_WMULUM,
  ARM_BUILTIN_WMULSM,
  ARM_BUILTIN_WMULUL,

  ARM_BUILTIN_PSADBH,
  ARM_BUILTIN_WSHUFH,

  ARM_BUILTIN_WSLLH,
  ARM_BUILTIN_WSLLW,
  ARM_BUILTIN_WSLLD,
  ARM_BUILTIN_WSRAH,
  ARM_BUILTIN_WSRAW,
  ARM_BUILTIN_WSRAD,
  ARM_BUILTIN_WSRLH,
  ARM_BUILTIN_WSRLW,
  ARM_BUILTIN_WSRLD,
  ARM_BUILTIN_WRORH,
  ARM_BUILTIN_WRORW,
  ARM_BUILTIN_WRORD,
  ARM_BUILTIN_WSLLHI,
  ARM_BUILTIN_WSLLWI,
  ARM_BUILTIN_WSLLDI,
  ARM_BUILTIN_WSRAHI,
  ARM_BUILTIN_WSRAWI,
  ARM_BUILTIN_WSRADI,
  ARM_BUILTIN_WSRLHI,
  ARM_BUILTIN_WSRLWI,
  ARM_BUILTIN_WSRLDI,
  ARM_BUILTIN_WRORHI,
  ARM_BUILTIN_WRORWI,
  ARM_BUILTIN_WRORDI,

  ARM_BUILTIN_WUNPCKIHB,
  ARM_BUILTIN_WUNPCKIHH,
  ARM_BUILTIN_WUNPCKIHW,
  ARM_BUILTIN_WUNPCKILB,
  ARM_BUILTIN_WUNPCKILH,
  ARM_BUILTIN_WUNPCKILW,

  ARM_BUILTIN_WUNPCKEHSB,
  ARM_BUILTIN_WUNPCKEHSH,
  ARM_BUILTIN_WUNPCKEHSW,
  ARM_BUILTIN_WUNPCKEHUB,
  ARM_BUILTIN_WUNPCKEHUH,
  ARM_BUILTIN_WUNPCKEHUW,
  ARM_BUILTIN_WUNPCKELSB,
  ARM_BUILTIN_WUNPCKELSH,
  ARM_BUILTIN_WUNPCKELSW,
  ARM_BUILTIN_WUNPCKELUB,
  ARM_BUILTIN_WUNPCKELUH,
  ARM_BUILTIN_WUNPCKELUW,

  ARM_BUILTIN_WABSB,
  ARM_BUILTIN_WABSH,
  ARM_BUILTIN_WABSW,

  ARM_BUILTIN_WADDSUBHX,
  ARM_BUILTIN_WSUBADDHX,

  ARM_BUILTIN_WABSDIFFB,
  ARM_BUILTIN_WABSDIFFH,
  ARM_BUILTIN_WABSDIFFW,

  ARM_BUILTIN_WADDCH,
  ARM_BUILTIN_WADDCW,

  ARM_BUILTIN_WAVG4,
  ARM_BUILTIN_WAVG4R,

  ARM_BUILTIN_WMADDSX,
  ARM_BUILTIN_WMADDUX,

  ARM_BUILTIN_WMADDSN,
  ARM_BUILTIN_WMADDUN,

  ARM_BUILTIN_WMULWSM,
  ARM_BUILTIN_WMULWUM,

  ARM_BUILTIN_WMULWSMR,
  ARM_BUILTIN_WMULWUMR,

  ARM_BUILTIN_WMULWL,

  ARM_BUILTIN_WMULSMR,
  ARM_BUILTIN_WMULUMR,

  ARM_BUILTIN_WQMULM,
  ARM_BUILTIN_WQMULMR,

  ARM_BUILTIN_WQMULWM,
  ARM_BUILTIN_WQMULWMR,

  ARM_BUILTIN_WADDBHUSM,
  ARM_BUILTIN_WADDBHUSL,

  ARM_BUILTIN_WQMIABB,
  ARM_BUILTIN_WQMIABT,
  ARM_BUILTIN_WQMIATB,
  ARM_BUILTIN_WQMIATT,

  ARM_BUILTIN_WQMIABBN,
  ARM_BUILTIN_WQMIABTN,
  ARM_BUILTIN_WQMIATBN,
  ARM_BUILTIN_WQMIATTN,

  ARM_BUILTIN_WMIABB,
  ARM_BUILTIN_WMIABT,
  ARM_BUILTIN_WMIATB,
  ARM_BUILTIN_WMIATT,

  ARM_BUILTIN_WMIABBN,
  ARM_BUILTIN_WMIABTN,
  ARM_BUILTIN_WMIATBN,
  ARM_BUILTIN_WMIATTN,

  ARM_BUILTIN_WMIAWBB,
  ARM_BUILTIN_WMIAWBT,
  ARM_BUILTIN_WMIAWTB,
  ARM_BUILTIN_WMIAWTT,

  ARM_BUILTIN_WMIAWBBN,
  ARM_BUILTIN_WMIAWBTN,
  ARM_BUILTIN_WMIAWTBN,
  ARM_BUILTIN_WMIAWTTN,

  ARM_BUILTIN_WMERGE,

  ARM_BUILTIN_GET_FPSCR,
  ARM_BUILTIN_SET_FPSCR,
  ARM_BUILTIN_GET_FPSCR_NZCVQC,
  ARM_BUILTIN_SET_FPSCR_NZCVQC,

  ARM_BUILTIN_CMSE_NONSECURE_CALLER,
  ARM_BUILTIN_SIMD_LANE_CHECK,

  /* Crypto builtins: one enumerator per CRYPTO1/2/3 line in crypto.def,
     following the ARM_BUILTIN_CRYPTO_BASE marker.  */
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3

#define CRYPTO1(L, U, M1, M2) \
  ARM_BUILTIN_CRYPTO_##U,
#define CRYPTO2(L, U, M1, M2, M3) \
  ARM_BUILTIN_CRYPTO_##U,
#define CRYPTO3(L, U, M1, M2, M3, M4) \
  ARM_BUILTIN_CRYPTO_##U,

  ARM_BUILTIN_CRYPTO_BASE,

#include "crypto.def"

#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3

  /* VFP and NEON groups reuse the VAR1 expansion defined just above
     this enum.  */
  ARM_BUILTIN_VFP_BASE,

#include "arm_vfp_builtins.def"

  ARM_BUILTIN_NEON_BASE,

#include "arm_neon_builtins.def"

#undef VAR1
#define VAR1(T, N, X) \
  ARM_BUILTIN_##N,

  ARM_BUILTIN_ACLE_BASE,
  ARM_BUILTIN_SAT_IMM_CHECK = ARM_BUILTIN_ACLE_BASE,

#include "arm_acle_builtins.def"

  /* CDE entries carry two extra macro arguments (IMM_MAX, ECF_FLAG)
     which the enum does not need, hence the variadic VAR1.  */
#undef VAR1
#define VAR1(T, N, X, ... ) \
  ARM_BUILTIN_##N##X,

  ARM_BUILTIN_CDE_BASE,

#include "arm_cde_builtins.def"

  ARM_BUILTIN_MVE_BASE,

#undef VAR1
#define VAR1(T, N, X) \
  ARM_BUILTIN_MVE_##N##X,
#include "arm_mve_builtins.def"

  ARM_BUILTIN_MAX
};
1300
/* First function code after each *_BASE marker, i.e. the code of the
   first entry of the corresponding table-driven builtin group.  */
#define ARM_BUILTIN_VFP_PATTERN_START \
  (ARM_BUILTIN_VFP_BASE + 1)

#define ARM_BUILTIN_NEON_PATTERN_START \
  (ARM_BUILTIN_NEON_BASE + 1)

#define ARM_BUILTIN_MVE_PATTERN_START \
  (ARM_BUILTIN_MVE_BASE + 1)

#define ARM_BUILTIN_ACLE_PATTERN_START \
  (ARM_BUILTIN_ACLE_BASE + 1)

#define ARM_BUILTIN_CDE_PATTERN_START \
  (ARM_BUILTIN_CDE_BASE + 1)

/* Last CDE function code (inclusive), derived from the table size.  */
#define ARM_BUILTIN_CDE_PATTERN_END \
  (ARM_BUILTIN_CDE_BASE + ARRAY_SIZE (cde_builtin_data))
1318
/* The CF and VARn helper macros are only needed while expanding the
   builtin tables and the enum above, so drop them all here.  VAR11
   through VAR14 are also defined above, so undefine them too rather
   than letting them leak into the rest of the file.  */
#undef CF
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11
#undef VAR12
#undef VAR13
#undef VAR14
1330
/* Builtin FUNCTION_DECLs, indexed by function code from enum
   arm_builtins; GTY-marked so the trees are rooted for the garbage
   collector.  */
static GTY(()) tree arm_builtin_decls[ARM_BUILTIN_MAX];

#define NUM_DREG_TYPES 5
#define NUM_QREG_TYPES 6
1335
/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  The list is
   NULL-terminated; arm_mangle_builtin_scalar_type scans it linearly.  */
const char *arm_scalar_builtin_types[] = {
  "__builtin_neon_qi",
  "__builtin_neon_hi",
  "__builtin_neon_si",
  "__builtin_neon_sf",
  "__builtin_neon_di",
  "__builtin_neon_df",
  "__builtin_neon_ti",
  "__builtin_neon_uqi",
  "__builtin_neon_uhi",
  "__builtin_neon_usi",
  "__builtin_neon_udi",
  "__builtin_neon_ei",
  "__builtin_neon_oi",
  "__builtin_neon_ci",
  "__builtin_neon_xi",
  "__builtin_neon_bf",
  NULL
};
1358
/* Expand each line of arm-simd-builtin-types.def into an
   arm_simd_type_info record: enum index, user-visible name, mangled
   name, then eltype/itype (both NULL_TREE until filled in by
   arm_init_simd_builtin_types), mode and qualifier.  */
#define ENTRY(E, M, Q, S, T, G)		\
  {E,					\
   "__simd" #S "_" #T "_t",		\
   #G "__simd" #S "_" #T "_t",		\
   NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
struct arm_simd_type_info arm_simd_types [] = {
#include "arm-simd-builtin-types.def"
};
#undef ENTRY
1368
/* The user-visible __fp16 type.  */
tree arm_fp16_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types.  */
tree arm_bf16_type_node = NULL_TREE;
tree arm_bf16_ptr_type_node = NULL_TREE;

/* Internal type nodes for the large "wide" integer modes and the legacy
   polynomial scalar types; all are built by
   arm_init_simd_builtin_types.  */
static tree arm_simd_intOI_type_node = NULL_TREE;
static tree arm_simd_intEI_type_node = NULL_TREE;
static tree arm_simd_intCI_type_node = NULL_TREE;
static tree arm_simd_intXI_type_node = NULL_TREE;
static tree arm_simd_polyQI_type_node = NULL_TREE;
static tree arm_simd_polyHI_type_node = NULL_TREE;
static tree arm_simd_polyDI_type_node = NULL_TREE;
static tree arm_simd_polyTI_type_node = NULL_TREE;
1384
1385 static const char *
arm_mangle_builtin_scalar_type(const_tree type)1386 arm_mangle_builtin_scalar_type (const_tree type)
1387 {
1388 int i = 0;
1389
1390 while (arm_scalar_builtin_types[i] != NULL)
1391 {
1392 const char *name = arm_scalar_builtin_types[i];
1393
1394 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1395 && DECL_NAME (TYPE_NAME (type))
1396 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
1397 return arm_scalar_builtin_types[i];
1398 i++;
1399 }
1400 return NULL;
1401 }
1402
1403 static const char *
arm_mangle_builtin_vector_type(const_tree type)1404 arm_mangle_builtin_vector_type (const_tree type)
1405 {
1406 tree attrs = TYPE_ATTRIBUTES (type);
1407 if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
1408 {
1409 tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
1410 return IDENTIFIER_POINTER (mangled_name);
1411 }
1412
1413 return NULL;
1414 }
1415
1416 const char *
arm_mangle_builtin_type(const_tree type)1417 arm_mangle_builtin_type (const_tree type)
1418 {
1419 const char *mangle;
1420 /* Walk through all the Arm builtins types tables to filter out the
1421 incoming type. */
1422 if ((mangle = arm_mangle_builtin_vector_type (type))
1423 || (mangle = arm_mangle_builtin_scalar_type (type)))
1424 return mangle;
1425
1426 return NULL;
1427 }
1428
/* Return the standard (non-poly, non-vector) type node for scalar MODE,
   picking the signed or unsigned variant according to qualifier Q.
   The large "wide" integer modes and __fp16/__bf16 map to the backend
   type nodes built by arm_init_simd_builtin_types.  */
static tree
arm_simd_builtin_std_type (machine_mode mode,
			   enum arm_type_qualifiers q)
{
/* QUAL_TYPE selects the signed or unsigned int node for width M.  */
#define QUAL_TYPE(M)  \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    case E_OImode:
      return arm_simd_intOI_type_node;
    case E_EImode:
      return arm_simd_intEI_type_node;
    case E_CImode:
      return arm_simd_intCI_type_node;
    case E_XImode:
      return arm_simd_intXI_type_node;
    case E_HFmode:
      return arm_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return arm_bf16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}
1468
1469 static tree
arm_lookup_simd_builtin_type(machine_mode mode,enum arm_type_qualifiers q)1470 arm_lookup_simd_builtin_type (machine_mode mode,
1471 enum arm_type_qualifiers q)
1472 {
1473 int i;
1474 int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
1475
1476 /* Non-poly scalar modes map to standard types not in the table. */
1477 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
1478 return arm_simd_builtin_std_type (mode, q);
1479
1480 for (i = 0; i < nelts; i++)
1481 if (arm_simd_types[i].mode == mode
1482 && arm_simd_types[i].q == q)
1483 return arm_simd_types[i].itype;
1484
1485 /* Note that we won't have caught the underlying type for poly64x2_t
1486 in the above table. This gets default mangling. */
1487
1488 return NULL_TREE;
1489 }
1490
1491 static tree
arm_simd_builtin_type(machine_mode mode,bool unsigned_p,bool poly_p)1492 arm_simd_builtin_type (machine_mode mode, bool unsigned_p, bool poly_p)
1493 {
1494 if (poly_p)
1495 return arm_lookup_simd_builtin_type (mode, qualifier_poly);
1496 else if (unsigned_p)
1497 return arm_lookup_simd_builtin_type (mode, qualifier_unsigned);
1498 else
1499 return arm_lookup_simd_builtin_type (mode, qualifier_none);
1500 }
1501
/* Build and register every SIMD builtin type: the legacy poly scalar
   types (non-MVE only), the element types for each entry of
   arm_simd_types, the vector types themselves, and the internal
   OI/EI/CI/XI "wide" integer types.  Fills in the eltype/itype fields
   left NULL_TREE by the ENTRY expansion above.  */
static void
arm_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (arm_simd_types) / sizeof (arm_simd_types[0]);
  tree tdecl;

  /* Poly types are a world of their own.  In order to maintain legacy
     ABI, they get initialized using the old interface, and don't get
     an entry in our mangling table, consequently, they get default
     mangling.  As a further gotcha, poly8_t and poly16_t are signed
     types, poly64_t and poly128_t are unsigned types.  */
  if (!TARGET_HAVE_MVE)
    {
      arm_simd_polyQI_type_node
	= build_distinct_type_copy (intQI_type_node);
      (*lang_hooks.types.register_builtin_type) (arm_simd_polyQI_type_node,
						 "__builtin_neon_poly8");
      arm_simd_polyHI_type_node
	= build_distinct_type_copy (intHI_type_node);
      (*lang_hooks.types.register_builtin_type) (arm_simd_polyHI_type_node,
						 "__builtin_neon_poly16");
      arm_simd_polyDI_type_node
	= build_distinct_type_copy (unsigned_intDI_type_node);
      (*lang_hooks.types.register_builtin_type) (arm_simd_polyDI_type_node,
						 "__builtin_neon_poly64");
      arm_simd_polyTI_type_node
	= build_distinct_type_copy (unsigned_intTI_type_node);
      (*lang_hooks.types.register_builtin_type) (arm_simd_polyTI_type_node,
						 "__builtin_neon_poly128");
      /* Init poly vector element types with scalar poly types.  */
      arm_simd_types[Poly8x8_t].eltype = arm_simd_polyQI_type_node;
      arm_simd_types[Poly8x16_t].eltype = arm_simd_polyQI_type_node;
      arm_simd_types[Poly16x4_t].eltype = arm_simd_polyHI_type_node;
      arm_simd_types[Poly16x8_t].eltype = arm_simd_polyHI_type_node;
      /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
	 mangling.  */

      /* Prevent front-ends from transforming poly vectors into string
	 literals.  */
      TYPE_STRING_FLAG (arm_simd_polyQI_type_node) = false;
      TYPE_STRING_FLAG (arm_simd_polyHI_type_node) = false;
    }
  /* Init all the element types built by the front-end.  */
  arm_simd_types[Int8x8_t].eltype = intQI_type_node;
  arm_simd_types[Int8x16_t].eltype = intQI_type_node;
  arm_simd_types[Int16x4_t].eltype = intHI_type_node;
  arm_simd_types[Int16x8_t].eltype = intHI_type_node;
  arm_simd_types[Int32x2_t].eltype = intSI_type_node;
  arm_simd_types[Int32x4_t].eltype = intSI_type_node;
  arm_simd_types[Int64x2_t].eltype = intDI_type_node;
  arm_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  arm_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  arm_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  arm_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  arm_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  arm_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  arm_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
     mangling.  */

  /* Continue with standard types.  */
  /* The __builtin_simd{64,128}_float16 types are kept private unless
     we have a scalar __fp16 type.  */
  arm_simd_types[Float16x4_t].eltype = arm_fp16_type_node;
  arm_simd_types[Float16x8_t].eltype = arm_fp16_type_node;
  arm_simd_types[Float32x2_t].eltype = float_type_node;
  arm_simd_types[Float32x4_t].eltype = float_type_node;

  /* Init Bfloat vector types with underlying __bf16 scalar type.  */
  arm_simd_types[Bfloat16x2_t].eltype = arm_bf16_type_node;
  arm_simd_types[Bfloat16x4_t].eltype = arm_bf16_type_node;
  arm_simd_types[Bfloat16x8_t].eltype = arm_bf16_type_node;

  for (i = 0; i < nelts; i++)
    {
      tree eltype = arm_simd_types[i].eltype;
      machine_mode mode = arm_simd_types[i].mode;

      /* Skip entries whose element type was not set above, unless they
	 are MVE predicate types, which have no element type here.  */
      if (eltype == NULL
	  /* VECTOR_BOOL is not supported unless MVE is activated,
	     this would make build_truth_vector_type_for_mode
	     crash.  */
	  && ((GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL)
	      || !TARGET_HAVE_MVE))
	continue;
      if (arm_simd_types[i].itype == NULL)
	{
	  tree type;
	  if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
	    {
	      /* Handle MVE predicates: they are internally stored as
		 16 bits, but are used as vectors of 1, 2 or 4-bit
		 elements.  */
	      type = build_truth_vector_type_for_mode (GET_MODE_NUNITS (mode),
						       mode);
	      eltype = TREE_TYPE (type);
	    }
	  else
	    type = build_vector_type (eltype, GET_MODE_NUNITS (mode));

	  /* Give each vector type its own distinct node plus the
	     mangled name carried via the "Advanced SIMD type"
	     attribute (read back by arm_mangle_builtin_vector_type).  */
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  tree mangled_name = get_identifier (arm_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  arm_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (arm_simd_types[i].name,
				arm_simd_types[i].itype);
      TYPE_NAME (arm_simd_types[i].itype) = tdecl;
      SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types[i].itype);
    }

  /* Build the internal wide integer types used by the load/store-
     multiple NEON builtins.  */
#define AARCH_BUILD_SIGNED_TYPE(mode)  \
  make_signed_type (GET_MODE_PRECISION (mode));
  arm_simd_intOI_type_node = AARCH_BUILD_SIGNED_TYPE (OImode);
  arm_simd_intEI_type_node = AARCH_BUILD_SIGNED_TYPE (EImode);
  arm_simd_intCI_type_node = AARCH_BUILD_SIGNED_TYPE (CImode);
  arm_simd_intXI_type_node = AARCH_BUILD_SIGNED_TYPE (XImode);
#undef AARCH_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	    ("__builtin_neon_ei" , arm_simd_intEI_type_node);
  TYPE_NAME (arm_simd_intEI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_neon_oi" , arm_simd_intOI_type_node);
  TYPE_NAME (arm_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_neon_ci" , arm_simd_intCI_type_node);
  TYPE_NAME (arm_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_neon_xi" , arm_simd_intXI_type_node);
  TYPE_NAME (arm_simd_intXI_type_node) = tdecl;
}
1642
1643 static void
arm_init_simd_builtin_scalar_types(void)1644 arm_init_simd_builtin_scalar_types (void)
1645 {
1646 /* Define typedefs for all the standard scalar types. */
1647 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
1648 "__builtin_neon_qi");
1649 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
1650 "__builtin_neon_hi");
1651 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
1652 "__builtin_neon_si");
1653 (*lang_hooks.types.register_builtin_type) (float_type_node,
1654 "__builtin_neon_sf");
1655 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
1656 "__builtin_neon_di");
1657 (*lang_hooks.types.register_builtin_type) (double_type_node,
1658 "__builtin_neon_df");
1659 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
1660 "__builtin_neon_ti");
1661 (*lang_hooks.types.register_builtin_type) (arm_bf16_type_node,
1662 "__builtin_neon_bf");
1663 /* Unsigned integer types for various mode sizes. */
1664 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
1665 "__builtin_neon_uqi");
1666 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
1667 "__builtin_neon_uhi");
1668 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
1669 "__builtin_neon_usi");
1670 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
1671 "__builtin_neon_udi");
1672 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
1673 "__builtin_neon_uti");
1674 }
1675
/* Set up a builtin.  It will use information stored in the argument struct D to
   derive the builtin's type signature and name.  It will append the name in D
   to the PREFIX passed and use these to create a builtin declaration that is
   then stored in 'arm_builtin_decls' under index FCODE.  This FCODE is also
   written back to D for future use.  */

static void
arm_init_builtin (unsigned int fcode, arm_builtin_datum *d,
		  const char * prefix)
{
  bool print_type_signature_p = false;
  char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
  char namebuf[60];
  tree ftype = NULL;
  tree fndecl = NULL;

  d->fcode = fcode;

  /* We must track two variables here.  op_num is
     the operand number as in the RTL pattern.  This is
     required to access the mode (e.g. V4SF mode) of the
     argument, from which the base type can be derived.
     arg_num is an index in to the qualifiers data, which
     gives qualifiers to the type (e.g. const unsigned).
     The reason these two variables may differ by one is the
     void return type.  While all return types take the 0th entry
     in the qualifiers array, there is no operand for them in the
     RTL pattern.  */
  int op_num = insn_data[d->code].n_operands - 1;
  int arg_num = d->qualifiers[0] & qualifier_void
		  ? op_num + 1
		  : op_num;
  tree return_type = void_type_node, args = void_list_node;
  tree eltype;

  /* Build a function type directly from the insn_data for this
     builtin.  The build_function_type () function takes care of
     removing duplicates for us.  Walks the operands from last to
     first, consing each argument type onto ARGS.  */
  for (; op_num >= 0; arg_num--, op_num--)
    {
      machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
      enum arm_type_qualifiers qualifiers = d->qualifiers[arg_num];

      /* Record an s/u/p character per position so overloaded variants
	 get distinct names (used in the snprintf below).  */
      if (qualifiers & qualifier_unsigned)
	{
	  type_signature[arg_num] = 'u';
	  print_type_signature_p = true;
	}
      else if (qualifiers & qualifier_poly)
	{
	  type_signature[arg_num] = 'p';
	  print_type_signature_p = true;
	}
      else
	type_signature[arg_num] = 's';

      /* Skip an internal operand for vget_{low, high}.  */
      if (qualifiers & qualifier_internal)
	continue;

      /* Some builtins have different user-facing types
	 for certain arguments, encoded in d->mode.  */
      if (qualifiers & qualifier_map_mode)
	op_mode = d->mode;

      /* MVE Predicates use HImode as mandated by the ABI: pred16_t is
	 unsigned short.  */
      if (qualifiers & qualifier_predicate)
	op_mode = HImode;

      /* For pointers, we want a pointer to the basic type
	 of the vector.  */
      if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	op_mode = GET_MODE_INNER (op_mode);

      /* For void pointers we already have nodes constructed by the midend.  */
      if (qualifiers & qualifier_void_pointer)
	eltype = qualifiers & qualifier_const
		 ? const_ptr_type_node : ptr_type_node;
      else
	{
	  eltype
	    = arm_simd_builtin_type (op_mode,
				     (qualifiers & qualifier_unsigned) != 0,
				     (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);
	}
      /* If we have reached arg_num == 0, we are at a non-void
	 return type.  Otherwise, we are still processing
	 arguments.  */
      if (arg_num == 0)
	return_type = eltype;
      else
	args = tree_cons (NULL_TREE, eltype, args);
    }

  ftype = build_function_type (return_type, args);

  gcc_assert (ftype != NULL);

  /* Only VFP/NEON builtins (codes below the ACLE base) get the
     per-argument type signature appended to their name.  */
  if (print_type_signature_p
      && IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
    snprintf (namebuf, sizeof (namebuf), "%s_%s_%s",
	      prefix, d->name, type_signature);
  else
    snprintf (namebuf, sizeof (namebuf), "%s_%s",
	      prefix, d->name);

  fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				 NULL, NULL_TREE);
  arm_builtin_decls[fcode] = fndecl;
}
1795
/* Initialize the backend REAL_TYPE type supporting bfloat types.
   Creates the scalar __bf16 type node, registers it with the language
   frontend, and builds the corresponding pointer type.  */
static void
arm_init_bf16_types (void)
{
  /* __bf16 is a 16-bit REAL_TYPE; force BFmode explicitly (before
     layout_type) rather than letting layout_type choose a mode from the
     precision alone.  */
  arm_bf16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (arm_bf16_type_node) = 16;
  SET_TYPE_MODE (arm_bf16_type_node, BFmode);
  layout_type (arm_bf16_type_node);

  /* Expose the type to source code under the name "__bf16".  */
  lang_hooks.types.register_builtin_type (arm_bf16_type_node, "__bf16");
  arm_bf16_ptr_type_node = build_pointer_type (arm_bf16_type_node);
}
1808
1809 /* Set up ACLE builtins, even builtins for instructions that are not
1810 in the current target ISA to allow the user to compile particular modules
1811 with different target specific options that differ from the command line
1812 options. Such builtins will be rejected in arm_expand_builtin. */
1813
1814 static void
arm_init_acle_builtins(void)1815 arm_init_acle_builtins (void)
1816 {
1817 unsigned int i, fcode = ARM_BUILTIN_ACLE_PATTERN_START;
1818
1819 tree sat_check_fpr = build_function_type_list (void_type_node,
1820 intSI_type_node,
1821 intSI_type_node,
1822 intSI_type_node,
1823 NULL);
1824 arm_builtin_decls[ARM_BUILTIN_SAT_IMM_CHECK]
1825 = add_builtin_function ("__builtin_sat_imm_check", sat_check_fpr,
1826 ARM_BUILTIN_SAT_IMM_CHECK, BUILT_IN_MD,
1827 NULL, NULL_TREE);
1828
1829 for (i = 0; i < ARRAY_SIZE (acle_builtin_data); i++, fcode++)
1830 {
1831 arm_builtin_datum *d = &acle_builtin_data[i];
1832 arm_init_builtin (fcode, d, "__builtin_arm");
1833 }
1834 }
1835
/* Set up the CDE (Custom Datapath Extension) builtins from
   cde_builtin_data.  NOTE(review): the early breaks below rely on the
   table being ordered so that FP-register-dependent entries start at
   ARM_BUILTIN_vcx1si and MVE-dependent entries at
   ARM_BUILTIN_vcx1qv16qi — confirm against the table definition.  */
static void
arm_init_cde_builtins (void)
{
  unsigned int i, fcode = ARM_BUILTIN_CDE_PATTERN_START;
  for (i = 0; i < ARRAY_SIZE (cde_builtin_data); i++, fcode++)
    {
      /* Only define CDE floating point builtins if the target has floating
	 point registers.  NOTE: without HARD_FLOAT we don't have MVE, so we
	 can break out of this loop directly here.  */
      if (!TARGET_MAYBE_HARD_FLOAT && fcode >= ARM_BUILTIN_vcx1si)
	break;
      /* Only define CDE/MVE builtins if MVE is available.  */
      if (!TARGET_HAVE_MVE && fcode >= ARM_BUILTIN_vcx1qv16qi)
	break;
      arm_builtin_cde_datum *cde = &cde_builtin_data[i];
      arm_builtin_datum *d = &cde->base;
      arm_init_builtin (fcode, d, "__builtin_arm");
      /* Apply the call-expression flags recorded for this builtin.  */
      set_call_expr_flags (arm_builtin_decls[fcode], cde->ecf_flag);
    }
}
1856
1857 /* Set up all the MVE builtins mentioned in arm_mve_builtins.def file. */
1858 static void
arm_init_mve_builtins(void)1859 arm_init_mve_builtins (void)
1860 {
1861 volatile unsigned int i, fcode = ARM_BUILTIN_MVE_PATTERN_START;
1862
1863 arm_init_simd_builtin_scalar_types ();
1864 arm_init_simd_builtin_types ();
1865
1866 /* Add support for __builtin_{get,set}_fpscr_nzcvqc, used by MVE intrinsics
1867 that read and/or write the carry bit. */
1868 tree get_fpscr_nzcvqc = build_function_type_list (intSI_type_node,
1869 NULL);
1870 tree set_fpscr_nzcvqc = build_function_type_list (void_type_node,
1871 intSI_type_node,
1872 NULL);
1873 arm_builtin_decls[ARM_BUILTIN_GET_FPSCR_NZCVQC]
1874 = add_builtin_function ("__builtin_arm_get_fpscr_nzcvqc", get_fpscr_nzcvqc,
1875 ARM_BUILTIN_GET_FPSCR_NZCVQC, BUILT_IN_MD, NULL,
1876 NULL_TREE);
1877 arm_builtin_decls[ARM_BUILTIN_SET_FPSCR_NZCVQC]
1878 = add_builtin_function ("__builtin_arm_set_fpscr_nzcvqc", set_fpscr_nzcvqc,
1879 ARM_BUILTIN_SET_FPSCR_NZCVQC, BUILT_IN_MD, NULL,
1880 NULL_TREE);
1881
1882 for (i = 0; i < ARRAY_SIZE (mve_builtin_data); i++, fcode++)
1883 {
1884 arm_builtin_datum *d = &mve_builtin_data[i];
1885 arm_init_builtin (fcode, d, "__builtin_mve");
1886 }
1887 }
1888
1889 /* Set up all the NEON builtins, even builtins for instructions that are not
1890 in the current target ISA to allow the user to compile particular modules
1891 with different target specific options that differ from the command line
1892 options. Such builtins will be rejected in arm_expand_builtin. */
1893
1894 static void
arm_init_neon_builtins(void)1895 arm_init_neon_builtins (void)
1896 {
1897 unsigned int i, fcode = ARM_BUILTIN_NEON_PATTERN_START;
1898
1899 arm_init_simd_builtin_types ();
1900
1901 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1902 Therefore we need to preserve the old __builtin scalar types. It can be
1903 removed once all the intrinsics become strongly typed using the qualifier
1904 system. */
1905 arm_init_simd_builtin_scalar_types ();
1906
1907 for (i = 0; i < ARRAY_SIZE (neon_builtin_data); i++, fcode++)
1908 {
1909 arm_builtin_datum *d = &neon_builtin_data[i];
1910 arm_init_builtin (fcode, d, "__builtin_neon");
1911 }
1912 }
1913
1914 /* Set up all the scalar floating point builtins. */
1915
1916 static void
arm_init_vfp_builtins(void)1917 arm_init_vfp_builtins (void)
1918 {
1919 unsigned int i, fcode = ARM_BUILTIN_VFP_PATTERN_START;
1920
1921 for (i = 0; i < ARRAY_SIZE (vfp_builtin_data); i++, fcode++)
1922 {
1923 arm_builtin_datum *d = &vfp_builtin_data[i];
1924 arm_init_builtin (fcode, d, "__builtin_neon");
1925 }
1926 }
1927
/* Set up the crypto builtins listed in crypto.def.  Each CRYPTO1/2/3
   entry becomes a "__builtin_arm_crypto_"-prefixed function whose type
   is one of the *_ftype_* trees built below.  */
static void
arm_init_crypto_builtins (void)
{
  /* Unsigned vector types used by the crypto intrinsics.  */
  tree V16UQI_type_node
    = arm_simd_builtin_type (V16QImode, true, false);

  tree V4USI_type_node
    = arm_simd_builtin_type (V4SImode, true, false);

  /* The locals below look unused but are referenced via token pasting:
     the FT1/FT2/FT3 macros build names of the form R_ftype_A1[_A2[_A3]]
     from the crypto.def entries.  Do not remove them.  */
  tree v16uqi_ftype_v16uqi
    = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
				NULL_TREE);

  tree v16uqi_ftype_v16uqi_v16uqi
    = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
				V16UQI_type_node, NULL_TREE);

  tree v4usi_ftype_v4usi
    = build_function_type_list (V4USI_type_node, V4USI_type_node,
				NULL_TREE);

  tree v4usi_ftype_v4usi_v4usi
    = build_function_type_list (V4USI_type_node, V4USI_type_node,
				V4USI_type_node, NULL_TREE);

  tree v4usi_ftype_v4usi_v4usi_v4usi
    = build_function_type_list (V4USI_type_node, V4USI_type_node,
				V4USI_type_node, V4USI_type_node,
				NULL_TREE);

  tree uti_ftype_udi_udi
    = build_function_type_list (unsigned_intTI_type_node,
				unsigned_intDI_type_node,
				unsigned_intDI_type_node,
				NULL_TREE);

  /* Clear any earlier expansions of the crypto.def iterator macros.  */
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#undef C
#undef N
#undef CF
#undef FT1
#undef FT2
#undef FT3

  /* C: builtin enum value; N: user-visible name; FT*: function type.  */
#define C(U) \
  ARM_BUILTIN_CRYPTO_##U
#define N(L) \
  "__builtin_arm_crypto_"#L
#define FT1(R, A) \
  R##_ftype_##A
#define FT2(R, A1, A2) \
  R##_ftype_##A1##_##A2
#define FT3(R, A1, A2, A3) \
  R##_ftype_##A1##_##A2##_##A3
#define CRYPTO1(L, U, R, A) \
  arm_builtin_decls[C (U)] \
    = add_builtin_function (N (L), FT1 (R, A), \
			    C (U), BUILT_IN_MD, NULL, NULL_TREE);
#define CRYPTO2(L, U, R, A1, A2)  \
  arm_builtin_decls[C (U)]	  \
    = add_builtin_function (N (L), FT2 (R, A1, A2), \
			    C (U), BUILT_IN_MD, NULL, NULL_TREE);

#define CRYPTO3(L, U, R, A1, A2, A3) \
  arm_builtin_decls[C (U)]	   \
    = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
			    C (U), BUILT_IN_MD, NULL, NULL_TREE);
#include "crypto.def"

#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#undef C
#undef N
#undef FT1
#undef FT2
#undef FT3
}
2008
#undef NUM_DREG_TYPES
#undef NUM_QREG_TYPES

/* Register builtin NAME with type TYPE under function code CODE, but
   only when ISA feature FLAG is active on the current target (isa_nobit
   means "always").  Records the new decl in arm_builtin_decls.  */
#define def_mbuiltin(FLAG, NAME, TYPE, CODE)				\
  do									\
    {									\
      if (FLAG == isa_nobit						\
	  || bitmap_bit_p (arm_active_target.isa, FLAG))		\
	{								\
	  tree bdecl;							\
	  bdecl = add_builtin_function ((NAME), (TYPE), (CODE),		\
					BUILT_IN_MD, NULL, NULL_TREE);	\
	  arm_builtin_decls[CODE] = bdecl;				\
	}								\
    }									\
  while (0)
2025
/* Table-entry description for the non-qualifier-based builtins
   (iWMMXt, FPSCR and crypto tables below).  */
struct builtin_description
{
  const enum isa_feature feature;	/* ISA bit gating the builtin.  */
  const enum insn_code icode;		/* Insn pattern used to expand it.  */
  const char * const name;		/* User-visible name; NULL for
					   entries registered elsewhere.  */
  const enum arm_builtins code;		/* Function code (fcode).  */
  const enum rtx_code comparison;	/* Comparison code, if any (UNKNOWN
					   throughout these tables).  */
  const unsigned int flag;		/* Extra per-builtin flag.  */
};
2035
/* Builtins taking two operands: iWMMXt/iWMMXt2 vector ops, the FPSCR
   accessors, and the two-argument crypto entries from crypto.def.  */
static const struct builtin_description bdesc_2arg[] =
{
#define IWMMXT_BUILTIN(code, string, builtin) \
  { isa_bit_iwmmxt, CODE_FOR_##code, \
    "__builtin_arm_" string, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

#define IWMMXT2_BUILTIN(code, string, builtin) \
  { isa_bit_iwmmxt2, CODE_FOR_##code, \
    "__builtin_arm_" string, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

  IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
  IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
  IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
  IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
  IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
  IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
  IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
  IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
  IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
  IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
  IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
  IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
  IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
  IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
  IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
  IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
  IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
  IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
  IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
  IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
  IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
  IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
  IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
  IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
  IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
  IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
  IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
  IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
  IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
  IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
  IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
  IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
  IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
  IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
  IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
  IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
  IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
  IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
  IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
  IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
  IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
  IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
  IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
  IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
  IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
  IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
  IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
  IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
  IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
  IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
  IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
  IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
  IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
  IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
  IWMMXT2_BUILTIN (iwmmxt_waddsubhx, "waddsubhx", WADDSUBHX)
  IWMMXT2_BUILTIN (iwmmxt_wsubaddhx, "wsubaddhx", WSUBADDHX)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffb, "wabsdiffb", WABSDIFFB)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffh, "wabsdiffh", WABSDIFFH)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffw, "wabsdiffw", WABSDIFFW)
  IWMMXT2_BUILTIN (iwmmxt_avg4, "wavg4", WAVG4)
  IWMMXT2_BUILTIN (iwmmxt_avg4r, "wavg4r", WAVG4R)
  IWMMXT2_BUILTIN (iwmmxt_wmulwsm, "wmulwsm", WMULWSM)
  IWMMXT2_BUILTIN (iwmmxt_wmulwum, "wmulwum", WMULWUM)
  IWMMXT2_BUILTIN (iwmmxt_wmulwsmr, "wmulwsmr", WMULWSMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulwumr, "wmulwumr", WMULWUMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulwl, "wmulwl", WMULWL)
  IWMMXT2_BUILTIN (iwmmxt_wmulsmr, "wmulsmr", WMULSMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulumr, "wmulumr", WMULUMR)
  IWMMXT2_BUILTIN (iwmmxt_wqmulm, "wqmulm", WQMULM)
  IWMMXT2_BUILTIN (iwmmxt_wqmulmr, "wqmulmr", WQMULMR)
  IWMMXT2_BUILTIN (iwmmxt_wqmulwm, "wqmulwm", WQMULWM)
  IWMMXT2_BUILTIN (iwmmxt_wqmulwmr, "wqmulwmr", WQMULWMR)
  IWMMXT_BUILTIN (iwmmxt_walignr0, "walignr0", WALIGNR0)
  IWMMXT_BUILTIN (iwmmxt_walignr1, "walignr1", WALIGNR1)
  IWMMXT_BUILTIN (iwmmxt_walignr2, "walignr2", WALIGNR2)
  IWMMXT_BUILTIN (iwmmxt_walignr3, "walignr3", WALIGNR3)

/* Nameless entries (name == NULL): these are not registered via
   def_mbuiltin; only the icode/fcode association is recorded.  */
#define IWMMXT_BUILTIN2(code, builtin) \
  { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

#define IWMMXT2_BUILTIN2(code, builtin) \
  { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

  IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm, WADDBHUSM)
  IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl, WADDBHUSL)
  IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
  IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
  IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)


/* FPSCR read/write builtins; always available (isa_nobit).  */
#define FP_BUILTIN(L, U) \
  {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
   UNKNOWN, 0},

  FP_BUILTIN (get_fpscr, GET_FPSCR)
  FP_BUILTIN (set_fpscr, SET_FPSCR)
#undef FP_BUILTIN

/* CRYPTO_BUILTIN is shared with bdesc_1arg/bdesc_3arg below and only
   #undef'd after the last table.  Here only two-argument crypto.def
   entries expand to table rows; CRYPTO1/CRYPTO3 expand to nothing.  */
#define CRYPTO_BUILTIN(L, U)					   \
  {isa_nobit, CODE_FOR_crypto_##L,	"__builtin_arm_crypto_"#L, \
   ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
#define CRYPTO1(L, U, R, A)
#define CRYPTO3(L, U, R, A1, A2, A3)
#include "crypto.def"
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3

};
2170
/* Builtins taking one operand: iWMMXt unary ops and the one-argument
   crypto.def entries (CRYPTO2/CRYPTO3 expand to nothing here).  */
static const struct builtin_description bdesc_1arg[] =
{
  IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
  IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
  IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
  IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
  IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
  IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
  IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3, "wabsb", WABSB)
  IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3, "wabsh", WABSH)
  IWMMXT2_BUILTIN (iwmmxt_wabsv2si3, "wabsw", WABSW)
  IWMMXT_BUILTIN (tbcstv8qi, "tbcstb", TBCSTB)
  IWMMXT_BUILTIN (tbcstv4hi, "tbcsth", TBCSTH)
  IWMMXT_BUILTIN (tbcstv2si, "tbcstw", TBCSTW)

#define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
#define CRYPTO2(L, U, R, A1, A2)
#define CRYPTO3(L, U, R, A1, A2, A3)
#include "crypto.def"
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
};
2206
/* Builtins taking three operands: only the three-argument crypto.def
   entries (CRYPTO1/CRYPTO2 expand to nothing here).  */
static const struct builtin_description bdesc_3arg[] =
{
#define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
#define CRYPTO1(L, U, R, A)
#define CRYPTO2(L, U, R, A1, A2)
#include "crypto.def"
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
};
#undef CRYPTO_BUILTIN
2218
/* Set up all the iWMMXt builtins.  This is only called from
   arm_init_builtins when TARGET_REALLY_IWMMXT is set.  First registers
   the simple one- and two-operand builtins from the bdesc tables above,
   then the remaining iWMMXt builtins with hand-built function types.  */

static void
arm_init_iwmmxt_builtins (void)
{
  const struct builtin_description * d;
  size_t i;

  /* The three iWMMXt 64-bit vector modes and the function types
     combining them used throughout this function.  */
  tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
  tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
  tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);

  tree v8qi_ftype_v8qi_v8qi_int
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, V8QI_type_node,
				integer_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_int
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, integer_type_node, NULL_TREE);
  tree v2si_ftype_v2si_int
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, integer_type_node, NULL_TREE);
  tree v2si_ftype_di_di
    = build_function_type_list (V2SI_type_node,
				long_long_integer_type_node,
				long_long_integer_type_node,
				NULL_TREE);
  tree di_ftype_di_int
    = build_function_type_list (long_long_integer_type_node,
				long_long_integer_type_node,
				integer_type_node, NULL_TREE);
  tree di_ftype_di_int_int
    = build_function_type_list (long_long_integer_type_node,
				long_long_integer_type_node,
				integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_v8qi
    = build_function_type_list (integer_type_node,
				V8QI_type_node, NULL_TREE);
  tree int_ftype_v4hi
    = build_function_type_list (integer_type_node,
				V4HI_type_node, NULL_TREE);
  tree int_ftype_v2si
    = build_function_type_list (integer_type_node,
				V2SI_type_node, NULL_TREE);
  tree int_ftype_v8qi_int
    = build_function_type_list (integer_type_node,
				V8QI_type_node, integer_type_node, NULL_TREE);
  tree int_ftype_v4hi_int
    = build_function_type_list (integer_type_node,
				V4HI_type_node, integer_type_node, NULL_TREE);
  tree int_ftype_v2si_int
    = build_function_type_list (integer_type_node,
				V2SI_type_node, integer_type_node, NULL_TREE);
  tree v8qi_ftype_v8qi_int_int
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_int_int
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2si_ftype_v2si_int_int
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  /* Miscellaneous.  */
  tree v8qi_ftype_v4hi_v4hi
    = build_function_type_list (V8QI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree v4hi_ftype_v2si_v2si
    = build_function_type_list (V4HI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);
  tree v8qi_ftype_v4hi_v8qi
    = build_function_type_list (V8QI_type_node,
				V4HI_type_node, V8QI_type_node, NULL_TREE);
  tree v2si_ftype_v4hi_v4hi
    = build_function_type_list (V2SI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v8qi_v8qi
    = build_function_type_list (V2SI_type_node,
				V8QI_type_node, V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_di
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, long_long_integer_type_node,
				NULL_TREE);
  tree v2si_ftype_v2si_di
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, long_long_integer_type_node,
				NULL_TREE);
  tree di_ftype_void
    = build_function_type_list (long_long_unsigned_type_node, NULL_TREE);
  tree int_ftype_void
    = build_function_type_list (integer_type_node, NULL_TREE);
  tree di_ftype_v8qi
    = build_function_type_list (long_long_integer_type_node,
				V8QI_type_node, NULL_TREE);
  tree di_ftype_v4hi
    = build_function_type_list (long_long_integer_type_node,
				V4HI_type_node, NULL_TREE);
  tree di_ftype_v2si
    = build_function_type_list (long_long_integer_type_node,
				V2SI_type_node, NULL_TREE);
  tree v2si_ftype_v4hi
    = build_function_type_list (V2SI_type_node,
				V4HI_type_node, NULL_TREE);
  tree v4hi_ftype_v8qi
    = build_function_type_list (V4HI_type_node,
				V8QI_type_node, NULL_TREE);
  tree v8qi_ftype_v8qi
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, NULL_TREE);

  tree di_ftype_di_v4hi_v4hi
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				V4HI_type_node, V4HI_type_node,
				NULL_TREE);

  tree di_ftype_v4hi_v4hi
    = build_function_type_list (long_long_unsigned_type_node,
				V4HI_type_node, V4HI_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si_v4hi_v4hi
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V4HI_type_node,
				V4HI_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v8qi_v8qi
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V8QI_type_node,
				V8QI_type_node, NULL_TREE);

  tree di_ftype_di_v2si_v2si
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				V2SI_type_node, V2SI_type_node,
				NULL_TREE);

  tree di_ftype_di_di_int
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				integer_type_node, NULL_TREE);

  tree void_ftype_int
    = build_function_type_list (void_type_node,
				integer_type_node, NULL_TREE);

  tree v8qi_ftype_char
    = build_function_type_list (V8QI_type_node,
				signed_char_type_node, NULL_TREE);

  tree v4hi_ftype_short
    = build_function_type_list (V4HI_type_node,
				short_integer_type_node, NULL_TREE);

  tree v2si_ftype_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, NULL_TREE);

  /* Normal vector binops.  */
  tree v8qi_ftype_v8qi_v8qi
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_v4hi
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);
  tree di_ftype_di_di
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				NULL_TREE);

  /* Add all builtins that are more or less simple operations on two
     operands.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      /* Use one of the operands; the target can have a different mode for
	 mask-generating compares.  */
      machine_mode mode;
      tree type;

      /* Skip nameless entries and the non-iWMMXt (FPSCR/crypto) rows in
	 bdesc_2arg; those are registered elsewhere.  */
      if (d->name == 0
	  || !(d->feature == isa_bit_iwmmxt
	       || d->feature == isa_bit_iwmmxt2))
	continue;

      mode = insn_data[d->icode].operand[1].mode;

      /* Pick the function type from the mode of the first input operand.  */
      switch (mode)
	{
	case E_V8QImode:
	  type = v8qi_ftype_v8qi_v8qi;
	  break;
	case E_V4HImode:
	  type = v4hi_ftype_v4hi_v4hi;
	  break;
	case E_V2SImode:
	  type = v2si_ftype_v2si_v2si;
	  break;
	case E_DImode:
	  type = di_ftype_di_di;
	  break;

	default:
	  gcc_unreachable ();
	}

      def_mbuiltin (d->feature, d->name, type, d->code);
    }

  /* Add the remaining MMX insns with somewhat more complicated types.  */
#define iwmmx_mbuiltin(NAME, TYPE, CODE)			\
  def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME,		\
		(TYPE), ARM_BUILTIN_ ## CODE)

#define iwmmx2_mbuiltin(NAME, TYPE, CODE)                      \
  def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME,        \
		(TYPE),	ARM_BUILTIN_ ## CODE)

  iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);
  iwmmx_mbuiltin ("setwcgr0", void_ftype_int, SETWCGR0);
  iwmmx_mbuiltin ("setwcgr1", void_ftype_int, SETWCGR1);
  iwmmx_mbuiltin ("setwcgr2", void_ftype_int, SETWCGR2);
  iwmmx_mbuiltin ("setwcgr3", void_ftype_int, SETWCGR3);
  iwmmx_mbuiltin ("getwcgr0", int_ftype_void, GETWCGR0);
  iwmmx_mbuiltin ("getwcgr1", int_ftype_void, GETWCGR1);
  iwmmx_mbuiltin ("getwcgr2", int_ftype_void, GETWCGR2);
  iwmmx_mbuiltin ("getwcgr3", int_ftype_void, GETWCGR3);

  /* Shifts and rotates: one variant taking the count in a DI register,
     one ("...i") taking an immediate count.  */
  iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di, WSLLH);
  iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di, WSLLW);
  iwmmx_mbuiltin ("wslld", di_ftype_di_di, WSLLD);
  iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int, WSLLHI);
  iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int, WSLLWI);
  iwmmx_mbuiltin ("wslldi", di_ftype_di_int, WSLLDI);

  iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di, WSRLH);
  iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di, WSRLW);
  iwmmx_mbuiltin ("wsrld", di_ftype_di_di, WSRLD);
  iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int, WSRLHI);
  iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int, WSRLWI);
  iwmmx_mbuiltin ("wsrldi", di_ftype_di_int, WSRLDI);

  iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di, WSRAH);
  iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di, WSRAW);
  iwmmx_mbuiltin ("wsrad", di_ftype_di_di, WSRAD);
  iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int, WSRAHI);
  iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int, WSRAWI);
  iwmmx_mbuiltin ("wsradi", di_ftype_di_int, WSRADI);

  iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di, WRORH);
  iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di, WRORW);
  iwmmx_mbuiltin ("wrord", di_ftype_di_di, WRORD);
  iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int, WRORHI);
  iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int, WRORWI);
  iwmmx_mbuiltin ("wrordi", di_ftype_di_int, WRORDI);

  iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int, WSHUFH);

  /* Sum-of-absolute-differences and multiply-accumulate variants.  */
  iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi, WSADB);
  iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi, WSADH);
  iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi, WMADDS);
  iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi, WMADDSX);
  iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi, WMADDSN);
  iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi, WMADDU);
  iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi, WMADDUX);
  iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi, WMADDUN);
  iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi, WSADBZ);
  iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi, WSADHZ);

  iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int, TEXTRMSB);
  iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int, TEXTRMSH);
  iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int, TEXTRMSW);
  iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int, TEXTRMUB);
  iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int, TEXTRMUH);
  iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int, TEXTRMUW);
  iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int, TINSRB);
  iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int, TINSRH);
  iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int, TINSRW);

  iwmmx_mbuiltin ("waccb", di_ftype_v8qi, WACCB);
  iwmmx_mbuiltin ("wacch", di_ftype_v4hi, WACCH);
  iwmmx_mbuiltin ("waccw", di_ftype_v2si, WACCW);

  iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi, TMOVMSKB);
  iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi, TMOVMSKH);
  iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si, TMOVMSKW);

  iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi, WADDBHUSM);
  iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi, WADDBHUSL);

  iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi, WPACKHSS);
  iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi, WPACKHUS);
  iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si, WPACKWUS);
  iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si, WPACKWSS);
  iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di, WPACKDUS);
  iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di, WPACKDSS);

  iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi, WUNPCKEHUB);
  iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi, WUNPCKEHUH);
  iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si, WUNPCKEHUW);
  iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi, WUNPCKEHSB);
  iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi, WUNPCKEHSH);
  iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si, WUNPCKEHSW);
  iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi, WUNPCKELUB);
  iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi, WUNPCKELUH);
  iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si, WUNPCKELUW);
  iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi, WUNPCKELSB);
  iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi, WUNPCKELSH);
  iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si, WUNPCKELSW);

  iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi, WMACS);
  iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi, WMACSZ);
  iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi, WMACU);
  iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi, WMACUZ);

  /* NOTE: "walign" maps to the WALIGNI function code (immediate form);
     the register forms are the walignr0-3 entries in bdesc_2arg.  */
  iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int, WALIGNI);
  iwmmx_mbuiltin ("tmia", di_ftype_di_int_int, TMIA);
  iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int, TMIAPH);
  iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int, TMIABB);
  iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int, TMIABT);
  iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int, TMIATB);
  iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int, TMIATT);

  iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi, WABSB);
  iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi, WABSH);
  iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si, WABSW);

  iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi, WQMIABB);
  iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi, WQMIABT);
  iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi, WQMIATB);
  iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi, WQMIATT);

  iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi, WQMIABBN);
  iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi, WQMIABTN);
  iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi, WQMIATBN);
  iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi, WQMIATTN);

  iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi, WMIABB);
  iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi, WMIABT);
  iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi, WMIATB);
  iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi, WMIATT);

  iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi, WMIABBN);
  iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi, WMIABTN);
  iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi, WMIATBN);
  iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi, WMIATTN);

  iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si, WMIAWBB);
  iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si, WMIAWBT);
  iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si, WMIAWTB);
  iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si, WMIAWTT);

  iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si, WMIAWBBN);
  iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si, WMIAWBTN);
  iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si, WMIAWTBN);
  iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si, WMIAWTTN);

  iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int, WMERGE);

  iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char, TBCSTB);
  iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short, TBCSTH);
  iwmmx_mbuiltin ("tbcstw", v2si_ftype_int, TBCSTW);

#undef iwmmx_mbuiltin
#undef iwmmx2_mbuiltin
}
2599
2600 static void
arm_init_fp16_builtins(void)2601 arm_init_fp16_builtins (void)
2602 {
2603 arm_fp16_type_node = make_node (REAL_TYPE);
2604 TYPE_PRECISION (arm_fp16_type_node) = GET_MODE_PRECISION (HFmode);
2605 layout_type (arm_fp16_type_node);
2606 if (arm_fp16_format)
2607 (*lang_hooks.types.register_builtin_type) (arm_fp16_type_node,
2608 "__fp16");
2609 }
2610
/* Target hook: register every ARM machine-dependent builtin that the
   current target selection supports.  The order of the init calls
   below is significant; see the comments on individual steps.  */
void
arm_init_builtins (void)
{
  if (TARGET_REALLY_IWMMXT)
    arm_init_iwmmxt_builtins ();

  /* This creates the arm_simd_floatHF_type_node so must come before
     arm_init_neon_builtins which uses it.  */
  arm_init_fp16_builtins ();

  arm_init_bf16_types ();

  if (TARGET_MAYBE_HARD_FLOAT)
    {
      /* Lane-bounds-checking builtin, registered whenever hard float is
	 possible; it takes the lane count and the lane index as SImode
	 arguments and returns nothing.  */
      tree lane_check_fpr = build_function_type_list (void_type_node,
						      intSI_type_node,
						      intSI_type_node,
						      NULL);
      arm_builtin_decls[ARM_BUILTIN_SIMD_LANE_CHECK]
      = add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr,
			      ARM_BUILTIN_SIMD_LANE_CHECK, BUILT_IN_MD,
			      NULL, NULL_TREE);
      /* MVE and Neon builtin registration are mutually exclusive.  */
      if (TARGET_HAVE_MVE)
	arm_init_mve_builtins ();
      else
	arm_init_neon_builtins ();
      arm_init_vfp_builtins ();
      arm_init_crypto_builtins ();
    }

  if (TARGET_CDE)
    arm_init_cde_builtins ();

  arm_init_acle_builtins ();

  if (TARGET_MAYBE_HARD_FLOAT)
    {
      /* FPSCR accessors: the setter takes an unsigned value, the getter
	 returns one.  */
      tree ftype_set_fpscr
	= build_function_type_list (void_type_node, unsigned_type_node, NULL);
      tree ftype_get_fpscr
	= build_function_type_list (unsigned_type_node, NULL);

      arm_builtin_decls[ARM_BUILTIN_GET_FPSCR]
	= add_builtin_function ("__builtin_arm_get_fpscr", ftype_get_fpscr,
				ARM_BUILTIN_GET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
      arm_builtin_decls[ARM_BUILTIN_SET_FPSCR]
	= add_builtin_function ("__builtin_arm_set_fpscr", ftype_set_fpscr,
				ARM_BUILTIN_SET_FPSCR, BUILT_IN_MD, NULL, NULL_TREE);
    }

  if (use_cmse)
    {
      /* CMSE query builtin: returns nonzero state as an unsigned.  */
      tree ftype_cmse_nonsecure_caller
	= build_function_type_list (unsigned_type_node, NULL);
      arm_builtin_decls[ARM_BUILTIN_CMSE_NONSECURE_CALLER]
	= add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
				ftype_cmse_nonsecure_caller,
				ARM_BUILTIN_CMSE_NONSECURE_CALLER, BUILT_IN_MD,
				NULL, NULL_TREE);
    }
}
2672
2673 /* Return the ARM builtin for CODE. */
2674
2675 tree
arm_builtin_decl(unsigned code,bool initialize_p ATTRIBUTE_UNUSED)2676 arm_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2677 {
2678 if (code >= ARM_BUILTIN_MAX)
2679 return error_mark_node;
2680
2681 return arm_builtin_decls[code];
2682 }
2683
2684 /* Errors in the source file can cause expand_expr to return const0_rtx
2685 where we expect a vector. To avoid crashing, use one of the vector
2686 clear instructions. */
2687
2688 static rtx
safe_vector_operand(rtx x,machine_mode mode)2689 safe_vector_operand (rtx x, machine_mode mode)
2690 {
2691 if (x != const0_rtx)
2692 return x;
2693 x = gen_reg_rtx (mode);
2694
2695 emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
2696 : gen_rtx_SUBREG (DImode, x, 0)));
2697 return x;
2698 }
2699
2700 /* Function to expand ternary builtins. */
2701 static rtx
arm_expand_ternop_builtin(enum insn_code icode,tree exp,rtx target)2702 arm_expand_ternop_builtin (enum insn_code icode,
2703 tree exp, rtx target)
2704 {
2705 rtx pat;
2706 tree arg0 = CALL_EXPR_ARG (exp, 0);
2707 tree arg1 = CALL_EXPR_ARG (exp, 1);
2708 tree arg2 = CALL_EXPR_ARG (exp, 2);
2709
2710 rtx op0 = expand_normal (arg0);
2711 rtx op1 = expand_normal (arg1);
2712 rtx op2 = expand_normal (arg2);
2713
2714 machine_mode tmode = insn_data[icode].operand[0].mode;
2715 machine_mode mode0 = insn_data[icode].operand[1].mode;
2716 machine_mode mode1 = insn_data[icode].operand[2].mode;
2717 machine_mode mode2 = insn_data[icode].operand[3].mode;
2718
2719 if (VECTOR_MODE_P (mode0))
2720 op0 = safe_vector_operand (op0, mode0);
2721 if (VECTOR_MODE_P (mode1))
2722 op1 = safe_vector_operand (op1, mode1);
2723 if (VECTOR_MODE_P (mode2))
2724 op2 = safe_vector_operand (op2, mode2);
2725
2726 if (! target
2727 || GET_MODE (target) != tmode
2728 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2729 target = gen_reg_rtx (tmode);
2730
2731 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2732 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode)
2733 && (GET_MODE (op2) == mode2 || GET_MODE (op2) == VOIDmode));
2734
2735 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2736 op0 = copy_to_mode_reg (mode0, op0);
2737 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2738 op1 = copy_to_mode_reg (mode1, op1);
2739 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2740 op2 = copy_to_mode_reg (mode2, op2);
2741
2742 pat = GEN_FCN (icode) (target, op0, op1, op2);
2743 if (! pat)
2744 return 0;
2745 emit_insn (pat);
2746 return target;
2747 }
2748
2749 /* Subroutine of arm_expand_builtin to take care of binop insns. */
2750
2751 static rtx
arm_expand_binop_builtin(enum insn_code icode,tree exp,rtx target)2752 arm_expand_binop_builtin (enum insn_code icode,
2753 tree exp, rtx target)
2754 {
2755 rtx pat;
2756 tree arg0 = CALL_EXPR_ARG (exp, 0);
2757 tree arg1 = CALL_EXPR_ARG (exp, 1);
2758 rtx op0 = expand_normal (arg0);
2759 rtx op1 = expand_normal (arg1);
2760 machine_mode tmode = insn_data[icode].operand[0].mode;
2761 machine_mode mode0 = insn_data[icode].operand[1].mode;
2762 machine_mode mode1 = insn_data[icode].operand[2].mode;
2763
2764 if (VECTOR_MODE_P (mode0))
2765 op0 = safe_vector_operand (op0, mode0);
2766 if (VECTOR_MODE_P (mode1))
2767 op1 = safe_vector_operand (op1, mode1);
2768
2769 if (! target
2770 || GET_MODE (target) != tmode
2771 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2772 target = gen_reg_rtx (tmode);
2773
2774 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2775 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2776
2777 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2778 op0 = copy_to_mode_reg (mode0, op0);
2779 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2780 op1 = copy_to_mode_reg (mode1, op1);
2781
2782 pat = GEN_FCN (icode) (target, op0, op1);
2783 if (! pat)
2784 return 0;
2785 emit_insn (pat);
2786 return target;
2787 }
2788
2789 /* Subroutine of arm_expand_builtin to take care of unop insns. */
2790
2791 static rtx
arm_expand_unop_builtin(enum insn_code icode,tree exp,rtx target,int do_load)2792 arm_expand_unop_builtin (enum insn_code icode,
2793 tree exp, rtx target, int do_load)
2794 {
2795 rtx pat;
2796 tree arg0 = CALL_EXPR_ARG (exp, 0);
2797 rtx op0 = expand_normal (arg0);
2798 machine_mode tmode = insn_data[icode].operand[0].mode;
2799 machine_mode mode0 = insn_data[icode].operand[1].mode;
2800
2801 if (! target
2802 || GET_MODE (target) != tmode
2803 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2804 target = gen_reg_rtx (tmode);
2805 if (do_load)
2806 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
2807 else
2808 {
2809 if (VECTOR_MODE_P (mode0))
2810 op0 = safe_vector_operand (op0, mode0);
2811
2812 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2813 op0 = copy_to_mode_reg (mode0, op0);
2814 }
2815
2816 pat = GEN_FCN (icode) (target, op0);
2817
2818 if (! pat)
2819 return 0;
2820 emit_insn (pat);
2821 return target;
2822 }
2823
/* Classification of a builtin argument, used by arm_expand_builtin_args
   to decide how each operand of the call is expanded.  */
typedef enum {
  ARG_BUILTIN_COPY_TO_REG,	/* Force the value into a register.  */
  ARG_BUILTIN_CONSTANT,		/* Must be a constant immediate.  */
  ARG_BUILTIN_LANE_INDEX,	/* Lane index into the preceding vector.  */
  ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX, /* Lane index for a structure
					       load/store.  */
  ARG_BUILTIN_LANE_PAIR_INDEX,	/* Index selecting a pair of lanes.  */
  ARG_BUILTIN_LANE_QUADTUP_INDEX, /* Index selecting four lanes.  */
  ARG_BUILTIN_NEON_MEMORY,	/* Neon/MVE memory operand.  */
  ARG_BUILTIN_MEMORY,		/* Generic memory operand.  */
  ARG_BUILTIN_STOP		/* Terminates the argument list.  */
} builtin_arg;
2835
2836
2837 /* EXP is a pointer argument to a Neon load or store intrinsic. Derive
2838 and return an expression for the accessed memory.
2839
2840 The intrinsic function operates on a block of registers that has
2841 mode REG_MODE. This block contains vectors of type TYPE_MODE. The
2842 function references the memory at EXP of type TYPE and in mode
2843 MEM_MODE; this mode may be BLKmode if no more suitable mode is
2844 available. */
2845
2846 static tree
neon_dereference_pointer(tree exp,tree type,machine_mode mem_mode,machine_mode reg_mode,machine_mode vector_mode)2847 neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
2848 machine_mode reg_mode,
2849 machine_mode vector_mode)
2850 {
2851 HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
2852 tree elem_type, upper_bound, array_type;
2853
2854 /* Work out the size of the register block in bytes. */
2855 reg_size = GET_MODE_SIZE (reg_mode);
2856
2857 /* Work out the size of each vector in bytes. */
2858 vector_size = GET_MODE_SIZE (vector_mode);
2859
2860 /* Work out how many vectors there are. */
2861 gcc_assert (reg_size % vector_size == 0);
2862 nvectors = reg_size / vector_size;
2863
2864 /* Work out the type of each element. */
2865 gcc_assert (POINTER_TYPE_P (type));
2866 elem_type = TREE_TYPE (type);
2867
2868 /* Work out how many elements are being loaded or stored.
2869 MEM_MODE == REG_MODE implies a one-to-one mapping between register
2870 and memory elements; anything else implies a lane load or store. */
2871 if (mem_mode == reg_mode)
2872 nelems = vector_size * nvectors / int_size_in_bytes (elem_type);
2873 else
2874 nelems = nvectors;
2875
2876 /* Create a type that describes the full access. */
2877 upper_bound = build_int_cst (size_type_node, nelems - 1);
2878 array_type = build_array_type (elem_type, build_index_type (upper_bound));
2879
2880 /* Dereference EXP using that type. */
2881 return fold_build2 (MEM_REF, array_type, exp,
2882 build_int_cst (build_pointer_type (array_type), 0));
2883 }
2884
2885 /* EXP is a pointer argument to a vector scatter store intrinsics.
2886
2887 Consider the following example:
2888 VSTRW<v>.<dt> Qd, [Qm{, #+/-<imm>}]!
2889 When <Qm> used as the base register for the target address,
2890 this function is used to derive and return an expression for the
2891 accessed memory.
2892
2893 The intrinsic function operates on a block of registers that has mode
2894 REG_MODE. This block contains vectors of type TYPE_MODE. The function
2895 references the memory at EXP of type TYPE and in mode MEM_MODE. This
2896 mode may be BLKmode if no more suitable mode is available. */
2897
2898 static tree
mve_dereference_pointer(tree exp,tree type,machine_mode reg_mode,machine_mode vector_mode)2899 mve_dereference_pointer (tree exp, tree type, machine_mode reg_mode,
2900 machine_mode vector_mode)
2901 {
2902 HOST_WIDE_INT reg_size, vector_size, nelems;
2903 tree elem_type, upper_bound, array_type;
2904
2905 /* Work out the size of each vector in bytes. */
2906 vector_size = GET_MODE_SIZE (vector_mode);
2907
2908 /* Work out the size of the register block in bytes. */
2909 reg_size = GET_MODE_SIZE (reg_mode);
2910
2911 /* Work out the type of each element. */
2912 gcc_assert (POINTER_TYPE_P (type));
2913 elem_type = TREE_TYPE (type);
2914
2915 nelems = reg_size / vector_size;
2916
2917 /* Create a type that describes the full access. */
2918 upper_bound = build_int_cst (size_type_node, nelems - 1);
2919 array_type = build_array_type (elem_type, build_index_type (upper_bound));
2920
2921 /* Dereference EXP using that type. */
2922 return fold_build2 (MEM_REF, array_type, exp,
2923 build_int_cst (build_pointer_type (array_type), 0));
2924 }
2925
/* Expand a builtin.  Walk the ARG_BUILTIN_STOP-terminated ARGS
   description, expanding each argument of call EXP into an operand of
   insn ICODE, then emit the pattern.  MAP_MODE is the vector mode used
   for lane-index mapping and memory derivation, FCODE the ARM builtin
   code, HAVE_RETVAL nonzero when the insn produces a value in TARGET.
   Returns the result rtx, 0 on expansion failure, or TARGET after
   reporting an error.  */
static rtx
arm_expand_builtin_args (rtx target, machine_mode map_mode, int fcode,
			 int icode, int have_retval, tree exp,
			 builtin_arg *args)
{
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  tree formals;
  int argc = 0;
  rtx_insn * insn;

  /* When a value is produced, make sure TARGET is a suitable register
     for the insn's output operand.  */
  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  /* Formal parameter types of the builtin decl, used to recover the
     pointed-to type for memory arguments.  */
  formals = TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls[fcode]));

  for (;;)
    {
      builtin_arg thisarg = args[argc];

      if (thisarg == ARG_BUILTIN_STOP)
	break;
      else
	{
	  /* Insn operand 0 is the result when HAVE_RETVAL, so argument
	     ARGC maps to operand ARGC + HAVE_RETVAL.  */
	  int opno = argc + have_retval;
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  mode[argc] = insn_data[icode].operand[opno].mode;
	  if (thisarg == ARG_BUILTIN_NEON_MEMORY)
	    {
	      machine_mode other_mode
		= insn_data[icode].operand[1 - opno].mode;
	      /* Rewrite the pointer argument into an explicit memory
		 reference covering the whole access.  */
	      if (TARGET_HAVE_MVE && mode[argc] != other_mode)
		{
		  arg[argc] = mve_dereference_pointer (arg[argc],
						       TREE_VALUE (formals),
						       other_mode, map_mode);
		}
	      else
		arg[argc] = neon_dereference_pointer (arg[argc],
						      TREE_VALUE (formals),
						      mode[argc], other_mode,
						      map_mode);
	    }

	  /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
	     ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P be returned.  */
	  op[argc] = expand_expr (arg[argc], NULL_RTX, VOIDmode,
				  ((thisarg == ARG_BUILTIN_MEMORY
				    || thisarg == ARG_BUILTIN_NEON_MEMORY)
				   ? EXPAND_MEMORY : EXPAND_NORMAL));

	  switch (thisarg)
	    {
	    case ARG_BUILTIN_MEMORY:
	    case ARG_BUILTIN_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);

	      /* MVE uses mve_pred16_t (aka HImode) for vectors of
		 predicates.  */
	      if (GET_MODE_CLASS (mode[argc]) == MODE_VECTOR_BOOL)
		op[argc] = gen_lowpart (mode[argc], op[argc]);

	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[opno].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX:
	      gcc_assert (argc > 1);
	      if (CONST_INT_P (op[argc]))
		{
		  neon_lane_bounds (op[argc], 0,
				    GET_MODE_NUNITS (map_mode), exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[argc] =
		    GEN_INT (NEON_ENDIAN_LANE_N (map_mode, INTVAL (op[argc])));
		}
	      goto constant_arg;

	    case ARG_BUILTIN_LANE_INDEX:
	      /* Previous argument must be a vector, which this indexes.  */
	      gcc_assert (argc > 0);
	      if (CONST_INT_P (op[argc]))
		{
		  machine_mode vmode = mode[argc - 1];
		  neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode), exp);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case ARG_BUILTIN_LANE_PAIR_INDEX:
	      /* Previous argument must be a vector, which this indexes.  The
		 indexing will always select i and i+1 out of the vector, which
		 puts a limit on i.  */
	      gcc_assert (argc > 0);
	      if (CONST_INT_P (op[argc]))
		{
		  machine_mode vmode = mode[argc - 1];
		  neon_lane_bounds (op[argc], 0,
				    GET_MODE_NUNITS (vmode) / 2, exp);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case ARG_BUILTIN_LANE_QUADTUP_INDEX:
	      /* Previous argument must be a vector, which this indexes.  */
	      gcc_assert (argc > 0);
	      if (CONST_INT_P (op[argc]))
		{
		  machine_mode vmode = mode[argc - 1];
		  neon_lane_bounds (op[argc], 0,
				    GET_MODE_NUNITS (vmode) / 4, exp);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case ARG_BUILTIN_CONSTANT:
	    constant_arg:
	      if (!(*insn_data[icode].operand[opno].predicate)
		  (op[argc], mode[argc]))
		{
		  /* CDE builtins get a more specific diagnostic; argument
		     0 of a CDE builtin is the coprocessor number.  */
		  if (IN_RANGE (fcode, ARM_BUILTIN_CDE_PATTERN_START,
				ARM_BUILTIN_CDE_PATTERN_END))
		    {
		      if (argc == 0)
			{
			  unsigned int cp_bit = (CONST_INT_P (op[argc])
						 ? UINTVAL (op[argc]) : -1);
			  if (IN_RANGE (cp_bit, 0, ARM_CDE_CONST_COPROC))
			    error_at (EXPR_LOCATION (exp),
				      "coprocessor %d is not enabled "
				      "with +cdecp%d", cp_bit, cp_bit);
			  else
			    error_at (EXPR_LOCATION (exp),
				      "coproc must be a constant immediate in "
				      "range [0-%d] enabled with %<+cdecp<N>%>",
				      ARM_CDE_CONST_COPROC);
			}
		      else
			/* Here we mention the builtin name to follow the same
			   format that the C/C++ frontends use for referencing
			   a given argument index.  */
			error_at (EXPR_LOCATION (exp),
				  "argument %d to %qE must be a constant "
				  "immediate in range [0-%d]", argc + 1,
				  arm_builtin_decls[fcode],
				  cde_builtin_data[fcode -
				  ARM_BUILTIN_CDE_PATTERN_START].imm_max);
		    }
		  else
		    error_at (EXPR_LOCATION (exp),
			      "argument %d must be a constant immediate",
			      argc + 1);
		  /* We have failed to expand the pattern, and are safely
		     in to invalid code.  But the mid-end will still try to
		     build an assignment for this node while it expands,
		     before stopping for the error, just pass it back
		     TARGET to ensure a valid assignment.  */
		  return target;
		}
	      break;

	    case ARG_BUILTIN_NEON_MEMORY:
	      /* Check if expand failed.  */
	      if (op[argc] == const0_rtx)
		return 0;
	      gcc_assert (MEM_P (op[argc]));
	      PUT_MODE (op[argc], mode[argc]);
	      /* ??? arm_neon.h uses the same built-in functions for signed
		 and unsigned accesses, casting where necessary.  This isn't
		 alias safe.  */
	      set_mem_alias_set (op[argc], 0);
	      if (!(*insn_data[icode].operand[opno].predicate)
		  (op[argc], mode[argc]))
		op[argc] = (replace_equiv_address
			    (op[argc],
			     copy_to_mode_reg (Pmode, XEXP (op[argc], 0))));
	      break;

	    case ARG_BUILTIN_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  /* Dispatch to the generator matching the number of operands actually
     collected; patterns take at most SIMD_MAX_BUILTIN_ARGS - 1 inputs.  */
  if (have_retval)
    switch (argc)
      {
      case 0:
	pat = GEN_FCN (icode) (target);
	break;
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      case 6:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      case 6:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  /* Check whether our current target implements the pattern chosen for this
     builtin and error out if not.  */
  start_sequence ();
  emit_insn (pat);
  insn = get_insns ();
  end_sequence ();

  if (recog_memoized (insn) < 0)
    error ("this builtin is not supported for this target");
  else
    emit_insn (insn);

  /* Boolean-vector results live in HImode as far as the source language
     is concerned (mve_pred16_t); convert before returning.  */
  if (GET_MODE_CLASS (tmode) == MODE_VECTOR_BOOL)
    {
      rtx HItarget = gen_reg_rtx (HImode);
      emit_move_insn (HItarget, gen_lowpart (HImode, target));
      return HItarget;
    }

  return target;
}
3210
/* Expand a builtin.  These builtins are "special" because they don't have
   symbolic constants defined per-instruction or per instruction-variant.
   Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
   is passed into the function.  Translates D's qualifiers into a
   builtin_arg array and hands off to arm_expand_builtin_args.  */

static rtx
arm_expand_builtin_1 (int fcode, tree exp, rtx target,
		      arm_builtin_datum *d)
{
  enum insn_code icode = d->code;
  builtin_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;
  bool neon = false;
  bool mve = false;

  /* FCODE ranges identify which builtin family this is; memory
     arguments of Neon and MVE builtins need special expansion.  */
  if (IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
    neon = true;

  if (IN_RANGE (fcode, ARM_BUILTIN_MVE_BASE, ARM_BUILTIN_MAX - 1))
    mve = true;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	 the function has a non-void return type) or the operand for the
	 first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = ARG_BUILTIN_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
	args[k] = ARG_BUILTIN_LANE_PAIR_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
	args[k] = ARG_BUILTIN_LANE_QUADTUP_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
	args[k] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = ARG_BUILTIN_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? ARG_BUILTIN_CONSTANT : ARG_BUILTIN_COPY_TO_REG;
	}
      else if (d->qualifiers[qualifiers_k] & qualifier_pointer)
	{
	  if (neon || mve)
	    args[k] = ARG_BUILTIN_NEON_MEMORY;
	  else
	    args[k] = ARG_BUILTIN_MEMORY;
	}
      else
	args[k] = ARG_BUILTIN_COPY_TO_REG;
    }
  /* Terminate the argument description.  */
  args[k] = ARG_BUILTIN_STOP;

  /* The interface to arm_expand_builtin_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return arm_expand_builtin_args
    (target, d->mode, fcode, icode, !is_void, exp,
     &args[1]);
}
3291
3292 /* Expand an ACLE builtin, i.e. those registered only if their respective
3293 target constraints are met. This check happens within
3294 arm_expand_builtin_args. */
3295
3296 static rtx
arm_expand_acle_builtin(int fcode,tree exp,rtx target)3297 arm_expand_acle_builtin (int fcode, tree exp, rtx target)
3298 {
3299 if (fcode == ARM_BUILTIN_SAT_IMM_CHECK)
3300 {
3301 /* Check the saturation immediate bounds. */
3302
3303 rtx min_sat = expand_normal (CALL_EXPR_ARG (exp, 1));
3304 rtx max_sat = expand_normal (CALL_EXPR_ARG (exp, 2));
3305 gcc_assert (CONST_INT_P (min_sat));
3306 gcc_assert (CONST_INT_P (max_sat));
3307 rtx sat_imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3308 if (CONST_INT_P (sat_imm))
3309 {
3310 if (!IN_RANGE (sat_imm, min_sat, max_sat))
3311 error_at (EXPR_LOCATION (exp),
3312 "saturation bit range must be in the range [%wd, %wd]",
3313 UINTVAL (min_sat), UINTVAL (max_sat));
3314 }
3315 else
3316 error_at (EXPR_LOCATION (exp),
3317 "saturation bit range must be a constant immediate");
3318 /* Don't generate any RTL. */
3319 return const0_rtx;
3320 }
3321
3322 gcc_assert (fcode != ARM_BUILTIN_CDE_BASE);
3323 arm_builtin_datum *d
3324 = (fcode < ARM_BUILTIN_CDE_BASE)
3325 ? &acle_builtin_data[fcode - ARM_BUILTIN_ACLE_PATTERN_START]
3326 : &cde_builtin_data[fcode - ARM_BUILTIN_CDE_PATTERN_START].base;
3327
3328 return arm_expand_builtin_1 (fcode, exp, target, d);
3329 }
3330
3331 /* Expand an MVE builtin, i.e. those registered only if their respective target
3332 constraints are met. This check happens within arm_expand_builtin. */
3333
3334 static rtx
arm_expand_mve_builtin(int fcode,tree exp,rtx target)3335 arm_expand_mve_builtin (int fcode, tree exp, rtx target)
3336 {
3337 if (fcode >= ARM_BUILTIN_MVE_BASE && !TARGET_HAVE_MVE)
3338 {
3339 fatal_error (input_location,
3340 "You must enable MVE instructions"
3341 " to use these intrinsics");
3342 return const0_rtx;
3343 }
3344
3345 arm_builtin_datum *d
3346 = &mve_builtin_data[fcode - ARM_BUILTIN_MVE_PATTERN_START];
3347
3348 return arm_expand_builtin_1 (fcode, exp, target, d);
3349 }
3350
3351 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
3352 Most of these are "special" because they don't have symbolic
3353 constants defined per-instruction or per instruction-variant. Instead, the
3354 required info is looked up in the table neon_builtin_data. */
3355
3356 static rtx
arm_expand_neon_builtin(int fcode,tree exp,rtx target)3357 arm_expand_neon_builtin (int fcode, tree exp, rtx target)
3358 {
3359 if (fcode >= ARM_BUILTIN_NEON_BASE && ! TARGET_NEON)
3360 {
3361 fatal_error (input_location,
3362 "You must enable NEON instructions"
3363 " (e.g. %<-mfloat-abi=softfp%> %<-mfpu=neon%>)"
3364 " to use these intrinsics.");
3365 return const0_rtx;
3366 }
3367
3368 arm_builtin_datum *d
3369 = &neon_builtin_data[fcode - ARM_BUILTIN_NEON_PATTERN_START];
3370
3371 return arm_expand_builtin_1 (fcode, exp, target, d);
3372 }
3373
3374 /* Expand a VFP builtin. These builtins are treated like
3375 neon builtins except that the data is looked up in table
3376 VFP_BUILTIN_DATA. */
3377
3378 static rtx
arm_expand_vfp_builtin(int fcode,tree exp,rtx target)3379 arm_expand_vfp_builtin (int fcode, tree exp, rtx target)
3380 {
3381 if (fcode >= ARM_BUILTIN_VFP_BASE && ! TARGET_HARD_FLOAT)
3382 {
3383 fatal_error (input_location,
3384 "You must enable VFP instructions"
3385 " to use these intrinsics.");
3386 return const0_rtx;
3387 }
3388
3389 arm_builtin_datum *d
3390 = &vfp_builtin_data[fcode - ARM_BUILTIN_VFP_PATTERN_START];
3391
3392 return arm_expand_builtin_1 (fcode, exp, target, d);
3393 }
3394
3395 /* Expand an expression EXP that calls a built-in function,
3396 with result going to TARGET if that's convenient
3397 (and in mode MODE if that's convenient).
3398 SUBTARGET may be used as the target for computing one of EXP's operands.
3399 IGNORE is nonzero if the value is to be ignored. */
3400
3401 rtx
arm_expand_builtin(tree exp,rtx target,rtx subtarget ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED,int ignore ATTRIBUTE_UNUSED)3402 arm_expand_builtin (tree exp,
3403 rtx target,
3404 rtx subtarget ATTRIBUTE_UNUSED,
3405 machine_mode mode ATTRIBUTE_UNUSED,
3406 int ignore ATTRIBUTE_UNUSED)
3407 {
3408 const struct builtin_description * d;
3409 enum insn_code icode;
3410 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3411 tree arg0;
3412 tree arg1;
3413 tree arg2;
3414 rtx op0;
3415 rtx op1;
3416 rtx op2;
3417 rtx pat;
3418 unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
3419 size_t i;
3420 machine_mode tmode;
3421 machine_mode mode0;
3422 machine_mode mode1;
3423 machine_mode mode2;
3424 int opint;
3425 int selector;
3426 int mask;
3427 int imm;
3428
3429 if (fcode == ARM_BUILTIN_SIMD_LANE_CHECK)
3430 {
3431 /* Builtin is only to check bounds of the lane passed to some intrinsics
3432 that are implemented with gcc vector extensions in arm_neon.h. */
3433
3434 tree nlanes = CALL_EXPR_ARG (exp, 0);
3435 gcc_assert (TREE_CODE (nlanes) == INTEGER_CST);
3436 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 1));
3437 if (CONST_INT_P (lane_idx))
3438 neon_lane_bounds (lane_idx, 0, TREE_INT_CST_LOW (nlanes), exp);
3439 else
3440 error_at (EXPR_LOCATION (exp),
3441 "lane index must be a constant immediate");
3442 /* Don't generate any RTL. */
3443 return const0_rtx;
3444 }
3445 if (fcode >= ARM_BUILTIN_MVE_BASE)
3446 return arm_expand_mve_builtin (fcode, exp, target);
3447
3448 if (fcode >= ARM_BUILTIN_ACLE_BASE)
3449 return arm_expand_acle_builtin (fcode, exp, target);
3450
3451 if (fcode >= ARM_BUILTIN_NEON_BASE)
3452 return arm_expand_neon_builtin (fcode, exp, target);
3453
3454 if (fcode >= ARM_BUILTIN_VFP_BASE)
3455 return arm_expand_vfp_builtin (fcode, exp, target);
3456
3457 /* Check in the context of the function making the call whether the
3458 builtin is supported. */
3459 if (fcode >= ARM_BUILTIN_CRYPTO_BASE
3460 && (!TARGET_CRYPTO || !TARGET_HARD_FLOAT))
3461 {
3462 fatal_error (input_location,
3463 "You must enable crypto instructions"
3464 " (e.g. include %<-mfloat-abi=softfp%> "
3465 "%<-mfpu=crypto-neon%>)"
3466 " to use these intrinsics.");
3467 return const0_rtx;
3468 }
3469
3470 switch (fcode)
3471 {
3472 case ARM_BUILTIN_GET_FPSCR_NZCVQC:
3473 case ARM_BUILTIN_SET_FPSCR_NZCVQC:
3474 if (fcode == ARM_BUILTIN_GET_FPSCR_NZCVQC)
3475 {
3476 icode = CODE_FOR_get_fpscr_nzcvqc;
3477 target = gen_reg_rtx (SImode);
3478 emit_insn (GEN_FCN (icode) (target));
3479 return target;
3480 }
3481 else
3482 {
3483 icode = CODE_FOR_set_fpscr_nzcvqc;
3484 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
3485 emit_insn (GEN_FCN (icode) (force_reg (SImode, op0)));
3486 return NULL_RTX;
3487 }
3488
3489 case ARM_BUILTIN_GET_FPSCR:
3490 case ARM_BUILTIN_SET_FPSCR:
3491 if (fcode == ARM_BUILTIN_GET_FPSCR)
3492 {
3493 icode = CODE_FOR_get_fpscr;
3494 target = gen_reg_rtx (SImode);
3495 pat = GEN_FCN (icode) (target);
3496 }
3497 else
3498 {
3499 target = NULL_RTX;
3500 icode = CODE_FOR_set_fpscr;
3501 arg0 = CALL_EXPR_ARG (exp, 0);
3502 op0 = expand_normal (arg0);
3503 pat = GEN_FCN (icode) (force_reg (SImode, op0));
3504 }
3505 emit_insn (pat);
3506 return target;
3507
3508 case ARM_BUILTIN_CMSE_NONSECURE_CALLER:
3509 target = gen_reg_rtx (SImode);
3510 op0 = arm_return_addr (0, NULL_RTX);
3511 emit_insn (gen_andsi3 (target, op0, const1_rtx));
3512 op1 = gen_rtx_EQ (SImode, target, const0_rtx);
3513 emit_insn (gen_cstoresi4 (target, op1, target, const0_rtx));
3514 return target;
3515
3516 case ARM_BUILTIN_TEXTRMSB:
3517 case ARM_BUILTIN_TEXTRMUB:
3518 case ARM_BUILTIN_TEXTRMSH:
3519 case ARM_BUILTIN_TEXTRMUH:
3520 case ARM_BUILTIN_TEXTRMSW:
3521 case ARM_BUILTIN_TEXTRMUW:
3522 icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
3523 : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
3524 : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
3525 : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
3526 : CODE_FOR_iwmmxt_textrmw);
3527
3528 arg0 = CALL_EXPR_ARG (exp, 0);
3529 arg1 = CALL_EXPR_ARG (exp, 1);
3530 op0 = expand_normal (arg0);
3531 op1 = expand_normal (arg1);
3532 tmode = insn_data[icode].operand[0].mode;
3533 mode0 = insn_data[icode].operand[1].mode;
3534 mode1 = insn_data[icode].operand[2].mode;
3535
3536 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3537 op0 = copy_to_mode_reg (mode0, op0);
3538 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3539 {
3540 /* @@@ better error message */
3541 error ("selector must be an immediate");
3542 return gen_reg_rtx (tmode);
3543 }
3544
3545 opint = INTVAL (op1);
3546 if (fcode == ARM_BUILTIN_TEXTRMSB || fcode == ARM_BUILTIN_TEXTRMUB)
3547 {
3548 if (opint > 7 || opint < 0)
3549 error ("the range of selector should be in 0 to 7");
3550 }
3551 else if (fcode == ARM_BUILTIN_TEXTRMSH || fcode == ARM_BUILTIN_TEXTRMUH)
3552 {
3553 if (opint > 3 || opint < 0)
3554 error ("the range of selector should be in 0 to 3");
3555 }
3556 else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW. */
3557 {
3558 if (opint > 1 || opint < 0)
3559 error ("the range of selector should be in 0 to 1");
3560 }
3561
3562 if (target == 0
3563 || GET_MODE (target) != tmode
3564 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3565 target = gen_reg_rtx (tmode);
3566 pat = GEN_FCN (icode) (target, op0, op1);
3567 if (! pat)
3568 return 0;
3569 emit_insn (pat);
3570 return target;
3571
3572 case ARM_BUILTIN_WALIGNI:
      /* If op2 is immediate, call waligni, else call walignr.  */
3574 arg0 = CALL_EXPR_ARG (exp, 0);
3575 arg1 = CALL_EXPR_ARG (exp, 1);
3576 arg2 = CALL_EXPR_ARG (exp, 2);
3577 op0 = expand_normal (arg0);
3578 op1 = expand_normal (arg1);
3579 op2 = expand_normal (arg2);
3580 if (CONST_INT_P (op2))
3581 {
3582 icode = CODE_FOR_iwmmxt_waligni;
3583 tmode = insn_data[icode].operand[0].mode;
3584 mode0 = insn_data[icode].operand[1].mode;
3585 mode1 = insn_data[icode].operand[2].mode;
3586 mode2 = insn_data[icode].operand[3].mode;
3587 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3588 op0 = copy_to_mode_reg (mode0, op0);
3589 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3590 op1 = copy_to_mode_reg (mode1, op1);
3591 gcc_assert ((*insn_data[icode].operand[3].predicate) (op2, mode2));
3592 selector = INTVAL (op2);
3593 if (selector > 7 || selector < 0)
3594 error ("the range of selector should be in 0 to 7");
3595 }
3596 else
3597 {
3598 icode = CODE_FOR_iwmmxt_walignr;
3599 tmode = insn_data[icode].operand[0].mode;
3600 mode0 = insn_data[icode].operand[1].mode;
3601 mode1 = insn_data[icode].operand[2].mode;
3602 mode2 = insn_data[icode].operand[3].mode;
3603 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3604 op0 = copy_to_mode_reg (mode0, op0);
3605 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3606 op1 = copy_to_mode_reg (mode1, op1);
3607 if (!(*insn_data[icode].operand[3].predicate) (op2, mode2))
3608 op2 = copy_to_mode_reg (mode2, op2);
3609 }
3610 if (target == 0
3611 || GET_MODE (target) != tmode
3612 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3613 target = gen_reg_rtx (tmode);
3614 pat = GEN_FCN (icode) (target, op0, op1, op2);
3615 if (!pat)
3616 return 0;
3617 emit_insn (pat);
3618 return target;
3619
3620 case ARM_BUILTIN_TINSRB:
3621 case ARM_BUILTIN_TINSRH:
3622 case ARM_BUILTIN_TINSRW:
3623 case ARM_BUILTIN_WMERGE:
3624 icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
3625 : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
3626 : fcode == ARM_BUILTIN_WMERGE ? CODE_FOR_iwmmxt_wmerge
3627 : CODE_FOR_iwmmxt_tinsrw);
3628 arg0 = CALL_EXPR_ARG (exp, 0);
3629 arg1 = CALL_EXPR_ARG (exp, 1);
3630 arg2 = CALL_EXPR_ARG (exp, 2);
3631 op0 = expand_normal (arg0);
3632 op1 = expand_normal (arg1);
3633 op2 = expand_normal (arg2);
3634 tmode = insn_data[icode].operand[0].mode;
3635 mode0 = insn_data[icode].operand[1].mode;
3636 mode1 = insn_data[icode].operand[2].mode;
3637 mode2 = insn_data[icode].operand[3].mode;
3638
3639 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3640 op0 = copy_to_mode_reg (mode0, op0);
3641 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3642 op1 = copy_to_mode_reg (mode1, op1);
3643 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3644 {
3645 error ("selector must be an immediate");
3646 return const0_rtx;
3647 }
3648 if (icode == CODE_FOR_iwmmxt_wmerge)
3649 {
3650 selector = INTVAL (op2);
3651 if (selector > 7 || selector < 0)
3652 error ("the range of selector should be in 0 to 7");
3653 }
3654 if ((icode == CODE_FOR_iwmmxt_tinsrb)
3655 || (icode == CODE_FOR_iwmmxt_tinsrh)
3656 || (icode == CODE_FOR_iwmmxt_tinsrw))
3657 {
3658 mask = 0x01;
3659 selector= INTVAL (op2);
3660 if (icode == CODE_FOR_iwmmxt_tinsrb && (selector < 0 || selector > 7))
3661 error ("the range of selector should be in 0 to 7");
3662 else if (icode == CODE_FOR_iwmmxt_tinsrh && (selector < 0 ||selector > 3))
3663 error ("the range of selector should be in 0 to 3");
3664 else if (icode == CODE_FOR_iwmmxt_tinsrw && (selector < 0 ||selector > 1))
3665 error ("the range of selector should be in 0 to 1");
3666 mask <<= selector;
3667 op2 = GEN_INT (mask);
3668 }
3669 if (target == 0
3670 || GET_MODE (target) != tmode
3671 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3672 target = gen_reg_rtx (tmode);
3673 pat = GEN_FCN (icode) (target, op0, op1, op2);
3674 if (! pat)
3675 return 0;
3676 emit_insn (pat);
3677 return target;
3678
3679 case ARM_BUILTIN_SETWCGR0:
3680 case ARM_BUILTIN_SETWCGR1:
3681 case ARM_BUILTIN_SETWCGR2:
3682 case ARM_BUILTIN_SETWCGR3:
3683 icode = (fcode == ARM_BUILTIN_SETWCGR0 ? CODE_FOR_iwmmxt_setwcgr0
3684 : fcode == ARM_BUILTIN_SETWCGR1 ? CODE_FOR_iwmmxt_setwcgr1
3685 : fcode == ARM_BUILTIN_SETWCGR2 ? CODE_FOR_iwmmxt_setwcgr2
3686 : CODE_FOR_iwmmxt_setwcgr3);
3687 arg0 = CALL_EXPR_ARG (exp, 0);
3688 op0 = expand_normal (arg0);
3689 mode0 = insn_data[icode].operand[0].mode;
3690 if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
3691 op0 = copy_to_mode_reg (mode0, op0);
3692 pat = GEN_FCN (icode) (op0);
3693 if (!pat)
3694 return 0;
3695 emit_insn (pat);
3696 return 0;
3697
3698 case ARM_BUILTIN_GETWCGR0:
3699 case ARM_BUILTIN_GETWCGR1:
3700 case ARM_BUILTIN_GETWCGR2:
3701 case ARM_BUILTIN_GETWCGR3:
3702 icode = (fcode == ARM_BUILTIN_GETWCGR0 ? CODE_FOR_iwmmxt_getwcgr0
3703 : fcode == ARM_BUILTIN_GETWCGR1 ? CODE_FOR_iwmmxt_getwcgr1
3704 : fcode == ARM_BUILTIN_GETWCGR2 ? CODE_FOR_iwmmxt_getwcgr2
3705 : CODE_FOR_iwmmxt_getwcgr3);
3706 tmode = insn_data[icode].operand[0].mode;
3707 if (target == 0
3708 || GET_MODE (target) != tmode
3709 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3710 target = gen_reg_rtx (tmode);
3711 pat = GEN_FCN (icode) (target);
3712 if (!pat)
3713 return 0;
3714 emit_insn (pat);
3715 return target;
3716
3717 case ARM_BUILTIN_WSHUFH:
3718 icode = CODE_FOR_iwmmxt_wshufh;
3719 arg0 = CALL_EXPR_ARG (exp, 0);
3720 arg1 = CALL_EXPR_ARG (exp, 1);
3721 op0 = expand_normal (arg0);
3722 op1 = expand_normal (arg1);
3723 tmode = insn_data[icode].operand[0].mode;
3724 mode1 = insn_data[icode].operand[1].mode;
3725 mode2 = insn_data[icode].operand[2].mode;
3726
3727 if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
3728 op0 = copy_to_mode_reg (mode1, op0);
3729 if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
3730 {
3731 error ("mask must be an immediate");
3732 return const0_rtx;
3733 }
3734 selector = INTVAL (op1);
3735 if (selector < 0 || selector > 255)
3736 error ("the range of mask should be in 0 to 255");
3737 if (target == 0
3738 || GET_MODE (target) != tmode
3739 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3740 target = gen_reg_rtx (tmode);
3741 pat = GEN_FCN (icode) (target, op0, op1);
3742 if (! pat)
3743 return 0;
3744 emit_insn (pat);
3745 return target;
3746
3747 case ARM_BUILTIN_WMADDS:
3748 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds, exp, target);
3749 case ARM_BUILTIN_WMADDSX:
3750 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx, exp, target);
3751 case ARM_BUILTIN_WMADDSN:
3752 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn, exp, target);
3753 case ARM_BUILTIN_WMADDU:
3754 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu, exp, target);
3755 case ARM_BUILTIN_WMADDUX:
3756 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux, exp, target);
3757 case ARM_BUILTIN_WMADDUN:
3758 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun, exp, target);
3759 case ARM_BUILTIN_WSADBZ:
3760 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, exp, target);
3761 case ARM_BUILTIN_WSADHZ:
3762 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, exp, target);
3763
3764 /* Several three-argument builtins. */
3765 case ARM_BUILTIN_WMACS:
3766 case ARM_BUILTIN_WMACU:
3767 case ARM_BUILTIN_TMIA:
3768 case ARM_BUILTIN_TMIAPH:
3769 case ARM_BUILTIN_TMIATT:
3770 case ARM_BUILTIN_TMIATB:
3771 case ARM_BUILTIN_TMIABT:
3772 case ARM_BUILTIN_TMIABB:
3773 case ARM_BUILTIN_WQMIABB:
3774 case ARM_BUILTIN_WQMIABT:
3775 case ARM_BUILTIN_WQMIATB:
3776 case ARM_BUILTIN_WQMIATT:
3777 case ARM_BUILTIN_WQMIABBN:
3778 case ARM_BUILTIN_WQMIABTN:
3779 case ARM_BUILTIN_WQMIATBN:
3780 case ARM_BUILTIN_WQMIATTN:
3781 case ARM_BUILTIN_WMIABB:
3782 case ARM_BUILTIN_WMIABT:
3783 case ARM_BUILTIN_WMIATB:
3784 case ARM_BUILTIN_WMIATT:
3785 case ARM_BUILTIN_WMIABBN:
3786 case ARM_BUILTIN_WMIABTN:
3787 case ARM_BUILTIN_WMIATBN:
3788 case ARM_BUILTIN_WMIATTN:
3789 case ARM_BUILTIN_WMIAWBB:
3790 case ARM_BUILTIN_WMIAWBT:
3791 case ARM_BUILTIN_WMIAWTB:
3792 case ARM_BUILTIN_WMIAWTT:
3793 case ARM_BUILTIN_WMIAWBBN:
3794 case ARM_BUILTIN_WMIAWBTN:
3795 case ARM_BUILTIN_WMIAWTBN:
3796 case ARM_BUILTIN_WMIAWTTN:
3797 case ARM_BUILTIN_WSADB:
3798 case ARM_BUILTIN_WSADH:
3799 icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
3800 : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
3801 : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
3802 : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
3803 : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
3804 : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
3805 : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
3806 : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
3807 : fcode == ARM_BUILTIN_WQMIABB ? CODE_FOR_iwmmxt_wqmiabb
3808 : fcode == ARM_BUILTIN_WQMIABT ? CODE_FOR_iwmmxt_wqmiabt
3809 : fcode == ARM_BUILTIN_WQMIATB ? CODE_FOR_iwmmxt_wqmiatb
3810 : fcode == ARM_BUILTIN_WQMIATT ? CODE_FOR_iwmmxt_wqmiatt
3811 : fcode == ARM_BUILTIN_WQMIABBN ? CODE_FOR_iwmmxt_wqmiabbn
3812 : fcode == ARM_BUILTIN_WQMIABTN ? CODE_FOR_iwmmxt_wqmiabtn
3813 : fcode == ARM_BUILTIN_WQMIATBN ? CODE_FOR_iwmmxt_wqmiatbn
3814 : fcode == ARM_BUILTIN_WQMIATTN ? CODE_FOR_iwmmxt_wqmiattn
3815 : fcode == ARM_BUILTIN_WMIABB ? CODE_FOR_iwmmxt_wmiabb
3816 : fcode == ARM_BUILTIN_WMIABT ? CODE_FOR_iwmmxt_wmiabt
3817 : fcode == ARM_BUILTIN_WMIATB ? CODE_FOR_iwmmxt_wmiatb
3818 : fcode == ARM_BUILTIN_WMIATT ? CODE_FOR_iwmmxt_wmiatt
3819 : fcode == ARM_BUILTIN_WMIABBN ? CODE_FOR_iwmmxt_wmiabbn
3820 : fcode == ARM_BUILTIN_WMIABTN ? CODE_FOR_iwmmxt_wmiabtn
3821 : fcode == ARM_BUILTIN_WMIATBN ? CODE_FOR_iwmmxt_wmiatbn
3822 : fcode == ARM_BUILTIN_WMIATTN ? CODE_FOR_iwmmxt_wmiattn
3823 : fcode == ARM_BUILTIN_WMIAWBB ? CODE_FOR_iwmmxt_wmiawbb
3824 : fcode == ARM_BUILTIN_WMIAWBT ? CODE_FOR_iwmmxt_wmiawbt
3825 : fcode == ARM_BUILTIN_WMIAWTB ? CODE_FOR_iwmmxt_wmiawtb
3826 : fcode == ARM_BUILTIN_WMIAWTT ? CODE_FOR_iwmmxt_wmiawtt
3827 : fcode == ARM_BUILTIN_WMIAWBBN ? CODE_FOR_iwmmxt_wmiawbbn
3828 : fcode == ARM_BUILTIN_WMIAWBTN ? CODE_FOR_iwmmxt_wmiawbtn
3829 : fcode == ARM_BUILTIN_WMIAWTBN ? CODE_FOR_iwmmxt_wmiawtbn
3830 : fcode == ARM_BUILTIN_WMIAWTTN ? CODE_FOR_iwmmxt_wmiawttn
3831 : fcode == ARM_BUILTIN_WSADB ? CODE_FOR_iwmmxt_wsadb
3832 : CODE_FOR_iwmmxt_wsadh);
3833 arg0 = CALL_EXPR_ARG (exp, 0);
3834 arg1 = CALL_EXPR_ARG (exp, 1);
3835 arg2 = CALL_EXPR_ARG (exp, 2);
3836 op0 = expand_normal (arg0);
3837 op1 = expand_normal (arg1);
3838 op2 = expand_normal (arg2);
3839 tmode = insn_data[icode].operand[0].mode;
3840 mode0 = insn_data[icode].operand[1].mode;
3841 mode1 = insn_data[icode].operand[2].mode;
3842 mode2 = insn_data[icode].operand[3].mode;
3843
3844 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3845 op0 = copy_to_mode_reg (mode0, op0);
3846 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3847 op1 = copy_to_mode_reg (mode1, op1);
3848 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3849 op2 = copy_to_mode_reg (mode2, op2);
3850 if (target == 0
3851 || GET_MODE (target) != tmode
3852 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3853 target = gen_reg_rtx (tmode);
3854 pat = GEN_FCN (icode) (target, op0, op1, op2);
3855 if (! pat)
3856 return 0;
3857 emit_insn (pat);
3858 return target;
3859
3860 case ARM_BUILTIN_WZERO:
3861 target = gen_reg_rtx (DImode);
3862 emit_insn (gen_iwmmxt_clrdi (target));
3863 return target;
3864
3865 case ARM_BUILTIN_WSRLHI:
3866 case ARM_BUILTIN_WSRLWI:
3867 case ARM_BUILTIN_WSRLDI:
3868 case ARM_BUILTIN_WSLLHI:
3869 case ARM_BUILTIN_WSLLWI:
3870 case ARM_BUILTIN_WSLLDI:
3871 case ARM_BUILTIN_WSRAHI:
3872 case ARM_BUILTIN_WSRAWI:
3873 case ARM_BUILTIN_WSRADI:
3874 case ARM_BUILTIN_WRORHI:
3875 case ARM_BUILTIN_WRORWI:
3876 case ARM_BUILTIN_WRORDI:
3877 case ARM_BUILTIN_WSRLH:
3878 case ARM_BUILTIN_WSRLW:
3879 case ARM_BUILTIN_WSRLD:
3880 case ARM_BUILTIN_WSLLH:
3881 case ARM_BUILTIN_WSLLW:
3882 case ARM_BUILTIN_WSLLD:
3883 case ARM_BUILTIN_WSRAH:
3884 case ARM_BUILTIN_WSRAW:
3885 case ARM_BUILTIN_WSRAD:
3886 case ARM_BUILTIN_WRORH:
3887 case ARM_BUILTIN_WRORW:
3888 case ARM_BUILTIN_WRORD:
3889 icode = (fcode == ARM_BUILTIN_WSRLHI ? CODE_FOR_lshrv4hi3_iwmmxt
3890 : fcode == ARM_BUILTIN_WSRLWI ? CODE_FOR_lshrv2si3_iwmmxt
3891 : fcode == ARM_BUILTIN_WSRLDI ? CODE_FOR_lshrdi3_iwmmxt
3892 : fcode == ARM_BUILTIN_WSLLHI ? CODE_FOR_ashlv4hi3_iwmmxt
3893 : fcode == ARM_BUILTIN_WSLLWI ? CODE_FOR_ashlv2si3_iwmmxt
3894 : fcode == ARM_BUILTIN_WSLLDI ? CODE_FOR_ashldi3_iwmmxt
3895 : fcode == ARM_BUILTIN_WSRAHI ? CODE_FOR_ashrv4hi3_iwmmxt
3896 : fcode == ARM_BUILTIN_WSRAWI ? CODE_FOR_ashrv2si3_iwmmxt
3897 : fcode == ARM_BUILTIN_WSRADI ? CODE_FOR_ashrdi3_iwmmxt
3898 : fcode == ARM_BUILTIN_WRORHI ? CODE_FOR_rorv4hi3
3899 : fcode == ARM_BUILTIN_WRORWI ? CODE_FOR_rorv2si3
3900 : fcode == ARM_BUILTIN_WRORDI ? CODE_FOR_rordi3
3901 : fcode == ARM_BUILTIN_WSRLH ? CODE_FOR_lshrv4hi3_di
3902 : fcode == ARM_BUILTIN_WSRLW ? CODE_FOR_lshrv2si3_di
3903 : fcode == ARM_BUILTIN_WSRLD ? CODE_FOR_lshrdi3_di
3904 : fcode == ARM_BUILTIN_WSLLH ? CODE_FOR_ashlv4hi3_di
3905 : fcode == ARM_BUILTIN_WSLLW ? CODE_FOR_ashlv2si3_di
3906 : fcode == ARM_BUILTIN_WSLLD ? CODE_FOR_ashldi3_di
3907 : fcode == ARM_BUILTIN_WSRAH ? CODE_FOR_ashrv4hi3_di
3908 : fcode == ARM_BUILTIN_WSRAW ? CODE_FOR_ashrv2si3_di
3909 : fcode == ARM_BUILTIN_WSRAD ? CODE_FOR_ashrdi3_di
3910 : fcode == ARM_BUILTIN_WRORH ? CODE_FOR_rorv4hi3_di
3911 : fcode == ARM_BUILTIN_WRORW ? CODE_FOR_rorv2si3_di
3912 : fcode == ARM_BUILTIN_WRORD ? CODE_FOR_rordi3_di
3913 : CODE_FOR_nothing);
3914 arg1 = CALL_EXPR_ARG (exp, 1);
3915 op1 = expand_normal (arg1);
3916 if (GET_MODE (op1) == VOIDmode)
3917 {
3918 imm = INTVAL (op1);
3919 if ((fcode == ARM_BUILTIN_WRORWI || fcode == ARM_BUILTIN_WRORW)
3920 && (imm < 0 || imm > 32))
3921 {
3922 const char *builtin = (fcode == ARM_BUILTIN_WRORWI
3923 ? "_mm_rori_pi32" : "_mm_ror_pi32");
3924 error ("the range of count should be in 0 to 32; "
3925 "please check the intrinsic %qs in code", builtin);
3926 }
3927 else if ((fcode == ARM_BUILTIN_WRORHI || fcode == ARM_BUILTIN_WRORH)
3928 && (imm < 0 || imm > 16))
3929 {
3930 const char *builtin = (fcode == ARM_BUILTIN_WRORHI
3931 ? "_mm_rori_pi16" : "_mm_ror_pi16");
3932 error ("the range of count should be in 0 to 16; "
3933 "please check the intrinsic %qs in code", builtin);
3934 }
3935 else if ((fcode == ARM_BUILTIN_WRORDI || fcode == ARM_BUILTIN_WRORD)
3936 && (imm < 0 || imm > 64))
3937 {
3938 const char *builtin = (fcode == ARM_BUILTIN_WRORDI
3939 ? "_mm_rori_si64" : "_mm_ror_si64");
3940 error ("the range of count should be in 0 to 64; "
3941 "please check the intrinsic %qs in code", builtin);
3942 }
3943 else if (imm < 0)
3944 {
3945 const char *builtin;
3946 switch (fcode)
3947 {
3948 case ARM_BUILTIN_WSRLHI:
3949 builtin = "_mm_srli_pi16";
3950 break;
3951 case ARM_BUILTIN_WSRLWI:
3952 builtin = "_mm_srli_pi32";
3953 break;
3954 case ARM_BUILTIN_WSRLDI:
3955 builtin = "_mm_srli_si64";
3956 break;
3957 case ARM_BUILTIN_WSLLHI:
3958 builtin = "_mm_slli_pi16";
3959 break;
3960 case ARM_BUILTIN_WSLLWI:
3961 builtin = "_mm_slli_pi32";
3962 break;
3963 case ARM_BUILTIN_WSLLDI:
3964 builtin = "_mm_slli_si64";
3965 break;
3966 case ARM_BUILTIN_WSRAHI:
3967 builtin = "_mm_srai_pi16";
3968 break;
3969 case ARM_BUILTIN_WSRAWI:
3970 builtin = "_mm_srai_pi32";
3971 break;
3972 case ARM_BUILTIN_WSRADI:
3973 builtin = "_mm_srai_si64";
3974 break;
3975 case ARM_BUILTIN_WSRLH:
3976 builtin = "_mm_srl_pi16";
3977 break;
3978 case ARM_BUILTIN_WSRLW:
3979 builtin = "_mm_srl_pi32";
3980 break;
3981 case ARM_BUILTIN_WSRLD:
3982 builtin = "_mm_srl_si64";
3983 break;
3984 case ARM_BUILTIN_WSLLH:
3985 builtin = "_mm_sll_pi16";
3986 break;
3987 case ARM_BUILTIN_WSLLW:
3988 builtin = "_mm_sll_pi32";
3989 break;
3990 case ARM_BUILTIN_WSLLD:
3991 builtin = "_mm_sll_si64";
3992 break;
3993 case ARM_BUILTIN_WSRAH:
3994 builtin = "_mm_sra_pi16";
3995 break;
3996 case ARM_BUILTIN_WSRAW:
3997 builtin = "_mm_sra_si64";
3998 break;
3999 default:
4000 builtin = "_mm_sra_si64";
4001 break;
4002 }
4003 error ("the count should be no less than 0; "
4004 "please check the intrinsic %qs in code", builtin);
4005 }
4006 }
4007 return arm_expand_binop_builtin (icode, exp, target);
4008
4009 default:
4010 break;
4011 }
4012
4013 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4014 if (d->code == (enum arm_builtins) fcode)
4015 return arm_expand_binop_builtin (d->icode, exp, target);
4016
4017 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4018 if (d->code == (enum arm_builtins) fcode)
4019 return arm_expand_unop_builtin (d->icode, exp, target, 0);
4020
4021 for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4022 if (d->code == (enum arm_builtins) fcode)
4023 return arm_expand_ternop_builtin (d->icode, exp, target);
4024
4025 /* @@@ Should really do something sensible here. */
4026 return NULL_RTX;
4027 }
4028
4029 tree
arm_builtin_vectorized_function(unsigned int fn,tree type_out,tree type_in)4030 arm_builtin_vectorized_function (unsigned int fn, tree type_out, tree type_in)
4031 {
4032 machine_mode in_mode, out_mode;
4033 int in_n, out_n;
4034 bool out_unsigned_p = TYPE_UNSIGNED (type_out);
4035
4036 /* Can't provide any vectorized builtins when we can't use NEON. */
4037 if (!TARGET_NEON)
4038 return NULL_TREE;
4039
4040 if (TREE_CODE (type_out) != VECTOR_TYPE
4041 || TREE_CODE (type_in) != VECTOR_TYPE)
4042 return NULL_TREE;
4043
4044 out_mode = TYPE_MODE (TREE_TYPE (type_out));
4045 out_n = TYPE_VECTOR_SUBPARTS (type_out);
4046 in_mode = TYPE_MODE (TREE_TYPE (type_in));
4047 in_n = TYPE_VECTOR_SUBPARTS (type_in);
4048
4049 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
4050 decl of the vectorized builtin for the appropriate vector mode.
4051 NULL_TREE is returned if no such builtin is available. */
4052 #undef ARM_CHECK_BUILTIN_MODE
4053 #define ARM_CHECK_BUILTIN_MODE(C) \
4054 (TARGET_VFP5 \
4055 && flag_unsafe_math_optimizations \
4056 && ARM_CHECK_BUILTIN_MODE_1 (C))
4057
4058 #undef ARM_CHECK_BUILTIN_MODE_1
4059 #define ARM_CHECK_BUILTIN_MODE_1(C) \
4060 (out_mode == SFmode && out_n == C \
4061 && in_mode == SFmode && in_n == C)
4062
4063 #undef ARM_FIND_VRINT_VARIANT
4064 #define ARM_FIND_VRINT_VARIANT(N) \
4065 (ARM_CHECK_BUILTIN_MODE (2) \
4066 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
4067 : (ARM_CHECK_BUILTIN_MODE (4) \
4068 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
4069 : NULL_TREE))
4070
4071 switch (fn)
4072 {
4073 CASE_CFN_FLOOR:
4074 return ARM_FIND_VRINT_VARIANT (vrintm);
4075 CASE_CFN_CEIL:
4076 return ARM_FIND_VRINT_VARIANT (vrintp);
4077 CASE_CFN_TRUNC:
4078 return ARM_FIND_VRINT_VARIANT (vrintz);
4079 CASE_CFN_ROUND:
4080 return ARM_FIND_VRINT_VARIANT (vrinta);
4081 #undef ARM_CHECK_BUILTIN_MODE_1
4082 #define ARM_CHECK_BUILTIN_MODE_1(C) \
4083 (out_mode == SImode && out_n == C \
4084 && in_mode == SFmode && in_n == C)
4085
4086 #define ARM_FIND_VCVT_VARIANT(N) \
4087 (ARM_CHECK_BUILTIN_MODE (2) \
4088 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
4089 : (ARM_CHECK_BUILTIN_MODE (4) \
4090 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
4091 : NULL_TREE))
4092
4093 #define ARM_FIND_VCVTU_VARIANT(N) \
4094 (ARM_CHECK_BUILTIN_MODE (2) \
4095 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
4096 : (ARM_CHECK_BUILTIN_MODE (4) \
4097 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
4098 : NULL_TREE))
4099 CASE_CFN_LROUND:
4100 return (out_unsigned_p
4101 ? ARM_FIND_VCVTU_VARIANT (vcvta)
4102 : ARM_FIND_VCVT_VARIANT (vcvta));
4103 CASE_CFN_LCEIL:
4104 return (out_unsigned_p
4105 ? ARM_FIND_VCVTU_VARIANT (vcvtp)
4106 : ARM_FIND_VCVT_VARIANT (vcvtp));
4107 CASE_CFN_LFLOOR:
4108 return (out_unsigned_p
4109 ? ARM_FIND_VCVTU_VARIANT (vcvtm)
4110 : ARM_FIND_VCVT_VARIANT (vcvtm));
4111 #undef ARM_CHECK_BUILTIN_MODE
4112 #define ARM_CHECK_BUILTIN_MODE(C, N) \
4113 (out_mode == N##mode && out_n == C \
4114 && in_mode == N##mode && in_n == C)
4115 case CFN_BUILT_IN_BSWAP16:
4116 if (ARM_CHECK_BUILTIN_MODE (4, HI))
4117 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi, false);
4118 else if (ARM_CHECK_BUILTIN_MODE (8, HI))
4119 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi, false);
4120 else
4121 return NULL_TREE;
4122 case CFN_BUILT_IN_BSWAP32:
4123 if (ARM_CHECK_BUILTIN_MODE (2, SI))
4124 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si, false);
4125 else if (ARM_CHECK_BUILTIN_MODE (4, SI))
4126 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si, false);
4127 else
4128 return NULL_TREE;
4129 case CFN_BUILT_IN_BSWAP64:
4130 if (ARM_CHECK_BUILTIN_MODE (2, DI))
4131 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di, false);
4132 else
4133 return NULL_TREE;
4134 CASE_CFN_COPYSIGN:
4135 if (ARM_CHECK_BUILTIN_MODE (2, SF))
4136 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf, false);
4137 else if (ARM_CHECK_BUILTIN_MODE (4, SF))
4138 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf, false);
4139 else
4140 return NULL_TREE;
4141
4142 default:
4143 return NULL_TREE;
4144 }
4145 return NULL_TREE;
4146 }
4147 #undef ARM_FIND_VCVT_VARIANT
4148 #undef ARM_FIND_VCVTU_VARIANT
4149 #undef ARM_CHECK_BUILTIN_MODE
4150 #undef ARM_FIND_VRINT_VARIANT
4151
4152 void
arm_atomic_assign_expand_fenv(tree * hold,tree * clear,tree * update)4153 arm_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
4154 {
4155 const unsigned ARM_FE_INVALID = 1;
4156 const unsigned ARM_FE_DIVBYZERO = 2;
4157 const unsigned ARM_FE_OVERFLOW = 4;
4158 const unsigned ARM_FE_UNDERFLOW = 8;
4159 const unsigned ARM_FE_INEXACT = 16;
4160 const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT = (ARM_FE_INVALID
4161 | ARM_FE_DIVBYZERO
4162 | ARM_FE_OVERFLOW
4163 | ARM_FE_UNDERFLOW
4164 | ARM_FE_INEXACT);
4165 const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT = 8;
4166 tree fenv_var, get_fpscr, set_fpscr, mask, ld_fenv, masked_fenv;
4167 tree new_fenv_var, reload_fenv, restore_fnenv;
4168 tree update_call, atomic_feraiseexcept, hold_fnclex;
4169
4170 if (!TARGET_HARD_FLOAT)
4171 return;
4172
4173 /* Generate the equivalent of :
4174 unsigned int fenv_var;
4175 fenv_var = __builtin_arm_get_fpscr ();
4176
4177 unsigned int masked_fenv;
4178 masked_fenv = fenv_var & mask;
4179
4180 __builtin_arm_set_fpscr (masked_fenv); */
4181
4182 fenv_var = create_tmp_var_raw (unsigned_type_node);
4183 get_fpscr = arm_builtin_decls[ARM_BUILTIN_GET_FPSCR];
4184 set_fpscr = arm_builtin_decls[ARM_BUILTIN_SET_FPSCR];
4185 mask = build_int_cst (unsigned_type_node,
4186 ~((ARM_FE_ALL_EXCEPT << ARM_FE_EXCEPT_SHIFT)
4187 | ARM_FE_ALL_EXCEPT));
4188 ld_fenv = build4 (TARGET_EXPR, unsigned_type_node,
4189 fenv_var, build_call_expr (get_fpscr, 0),
4190 NULL_TREE, NULL_TREE);
4191 masked_fenv = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var, mask);
4192 hold_fnclex = build_call_expr (set_fpscr, 1, masked_fenv);
4193 *hold = build2 (COMPOUND_EXPR, void_type_node,
4194 build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
4195 hold_fnclex);
4196
4197 /* Store the value of masked_fenv to clear the exceptions:
4198 __builtin_arm_set_fpscr (masked_fenv); */
4199
4200 *clear = build_call_expr (set_fpscr, 1, masked_fenv);
4201
4202 /* Generate the equivalent of :
4203 unsigned int new_fenv_var;
4204 new_fenv_var = __builtin_arm_get_fpscr ();
4205
4206 __builtin_arm_set_fpscr (fenv_var);
4207
4208 __atomic_feraiseexcept (new_fenv_var); */
4209
4210 new_fenv_var = create_tmp_var_raw (unsigned_type_node);
4211 reload_fenv = build4 (TARGET_EXPR, unsigned_type_node, new_fenv_var,
4212 build_call_expr (get_fpscr, 0), NULL_TREE, NULL_TREE);
4213 restore_fnenv = build_call_expr (set_fpscr, 1, fenv_var);
4214 atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
4215 update_call = build_call_expr (atomic_feraiseexcept, 1,
4216 fold_convert (integer_type_node, new_fenv_var));
4217 *update = build2 (COMPOUND_EXPR, void_type_node,
4218 build2 (COMPOUND_EXPR, void_type_node,
4219 reload_fenv, restore_fnenv), update_call);
4220 }
4221
4222 /* Implement TARGET_CHECK_BUILTIN_CALL. Record a read of the Q bit through
4223 intrinsics in the machine function. */
4224 bool
arm_check_builtin_call(location_t,vec<location_t>,tree fndecl,tree,unsigned int,tree *)4225 arm_check_builtin_call (location_t , vec<location_t> , tree fndecl,
4226 tree, unsigned int, tree *)
4227 {
4228 int fcode = DECL_MD_FUNCTION_CODE (fndecl);
4229 if (fcode == ARM_BUILTIN_saturation_occurred
4230 || fcode == ARM_BUILTIN_set_saturation)
4231 {
4232 if (cfun && cfun->decl)
4233 DECL_ATTRIBUTES (cfun->decl)
4234 = tree_cons (get_identifier ("acle qbit"), NULL_TREE,
4235 DECL_ATTRIBUTES (cfun->decl));
4236 }
4237 if (fcode == ARM_BUILTIN_sel)
4238 {
4239 if (cfun && cfun->decl)
4240 DECL_ATTRIBUTES (cfun->decl)
4241 = tree_cons (get_identifier ("acle gebits"), NULL_TREE,
4242 DECL_ATTRIBUTES (cfun->decl));
4243 }
4244 return true;
4245 }
4246
4247 enum resolver_ident
arm_describe_resolver(tree fndecl)4248 arm_describe_resolver (tree fndecl)
4249 {
4250 if (DECL_MD_FUNCTION_CODE (fndecl) >= ARM_BUILTIN_vcx1qv16qi
4251 && DECL_MD_FUNCTION_CODE (fndecl) < ARM_BUILTIN_MVE_BASE)
4252 return arm_cde_resolver;
4253 return arm_no_resolver;
4254 }
4255
4256 unsigned
arm_cde_end_args(tree fndecl)4257 arm_cde_end_args (tree fndecl)
4258 {
4259 return DECL_MD_FUNCTION_CODE (fndecl) >= ARM_BUILTIN_vcx1q_p_v16qi ? 2 : 1;
4260 }
4261
4262 #include "gt-arm-builtins.h"
4263