1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "reload.h"
51
/* Some targets declare prototypes for the functions defined here in
   their system headers; define this to suppress ours.  Default off.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Local shorthands.  NOTE: arguments may be evaluated twice, so pass
   only side-effect-free expressions.  */
#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))
58
/* Target cpu type */

enum processor_type rs6000_cpu;
/* The cpu-selection switches, lowest priority first; each entry
   records whether the switch chooses the tuning and/or the
   architecture.  The string fields are filled in from the command
   line (or configury) before rs6000_override_options runs.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,		tune	arch */
  { (const char *)0, "--with-cpu=", 1, 1 },
  { (const char *)0, "-mcpu=", 1, 1 },
  { (const char *)0, "-mtune=", 1, 0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;	/* Text of -mlong-double-N.  */
int rs6000_long_double_type_size;		/* 64 or 128 bits.  */

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Text of -mtraceback= and the decoded setting.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  Exception: init_cumulative_args
   looks at it too, for libcalls.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;

/* One entry per builtin: the target-flag mask under which it is
   enabled, its insn pattern, user-visible name and builtin code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
168
/* Forward declarations for the static helpers defined later in this
   file, grouped roughly by area: GC and constant handling, frame and
   prologue emission, TOC hashing, assembler-output and per-object-
   format hooks, scheduling, and builtin expansion.  */
static void rs6000_add_gc_roots PARAMS ((void));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
				     unsigned int, int, int));
static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int toc_hash_mark_entry PARAMS ((void **, void *));
static void toc_hash_mark_table PARAMS ((void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static struct machine_function * rs6000_init_machine_status PARAMS ((void));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility PARAMS ((tree, int));
#endif
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes PARAMS ((tree));
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
					    HOST_WIDE_INT, tree));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
					       HOST_WIDE_INT, HOST_WIDE_INT));
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
static void rs6000_elf_select_section PARAMS ((tree, int,
					       unsigned HOST_WIDE_INT));
static void rs6000_elf_unique_section PARAMS ((tree, int));
static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
						   unsigned HOST_WIDE_INT));
static void rs6000_elf_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
static bool rs6000_elf_in_small_data_p PARAMS ((tree));
#endif
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
static void rs6000_xcoff_select_section PARAMS ((tree, int,
						 unsigned HOST_WIDE_INT));
static void rs6000_xcoff_unique_section PARAMS ((tree, int));
static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
						     unsigned HOST_WIDE_INT));
static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
#endif
static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static bool rs6000_binds_local_p PARAMS ((tree));
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

static void rs6000_init_builtins PARAMS ((void));
static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static void altivec_init_builtins PARAMS ((void));
static void rs6000_common_init_builtins PARAMS ((void));

static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
					      int, enum rs6000_builtins,
					      enum rs6000_builtins));
static void spe_init_builtins PARAMS ((void));
static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));

static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
static void rs6000_parse_abi_options PARAMS ((void));
static void rs6000_parse_vrsave_option PARAMS ((void));
static void rs6000_parse_isel_option PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
static int easy_vector_constant PARAMS ((rtx));
268
/* Default register names.  Indexed by hard register number; the
   default (AIX-style) assembler syntax uses bare numbers.  */
char rs6000_reg_names[][8] =
{
  /* GPRs 0-31.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs 0-31.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  /* MQ, link register, count register, argument pointer.  */
  "mq", "lr", "ctr","ap",
  /* Condition register fields CR0-CR7.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "xer",
  /* AltiVec registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};

#ifdef TARGET_REGNAMES
/* Symbolic (%-prefixed) register names, copied over rs6000_reg_names
   when -mregnames is in effect.  */
static const char alt_reg_names[][8] =
{
  "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
  "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
  "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
  "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
  "mq", "lr", "ctr", "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
  "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
  "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif

/* Subtargets without a strict-alignment switch get a zero mask.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
324
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

/* ??? Should work everywhere, but ask dje@watson.ibm.com before
   enabling for AIX.  */
#if TARGET_OBJECT_FORMAT != OBJECT_XCOFF
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
#endif

/* The single instance of the target hooks vector, filled in from the
   TARGET_* macros chosen above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
397
398 /* Override command line options. Mostly we process the processor
399 type and sometimes adjust other TARGET_ options. */
400
void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each canonical cpu name to its scheduling enum and
     the target flag bits to force on/off when that cpu is selected.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
      const int target_disable;		/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the --with-cpu=/-mcpu=/-mtune= selections in priority order;
     a later switch overrides the tune (and, where set_arch_p, the
     architecture flags) chosen by an earlier one.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The 8540 supports the isel instruction; default it on.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* AIX handles PIC its own way; stash the original request and clear
     flag_pic so the rest of the compiler sees non-PIC.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Only leading characters are compared, so "partial" matches "part"
     and both "no" and "none" select traceback_none.  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      /* Only 64 and 128 are accepted, and the number must consume the
	 whole argument.  */
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* The switch takes no argument; any trailing text is an error.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      /* base[0] is 'n' for -mno-longcall, 'l' for -mlongcall.  */
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* 128-bit long double on AIX and Darwin uses the IBM extended
     (double-double) format.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
727
728 /* Handle -misel= option. */
729 static void
rs6000_parse_isel_option()730 rs6000_parse_isel_option ()
731 {
732 if (rs6000_isel_string == 0)
733 return;
734 else if (! strcmp (rs6000_isel_string, "yes"))
735 rs6000_isel = 1;
736 else if (! strcmp (rs6000_isel_string, "no"))
737 rs6000_isel = 0;
738 else
739 error ("unknown -misel= option specified: '%s'",
740 rs6000_isel_string);
741 }
742
743 /* Handle -mvrsave= options. */
744 static void
rs6000_parse_vrsave_option()745 rs6000_parse_vrsave_option ()
746 {
747 /* Generate VRSAVE instructions by default. */
748 if (rs6000_altivec_vrsave_string == 0
749 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
750 rs6000_altivec_vrsave = 1;
751 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
752 rs6000_altivec_vrsave = 0;
753 else
754 error ("unknown -mvrsave= option specified: '%s'",
755 rs6000_altivec_vrsave_string);
756 }
757
758 /* Handle -mabi= options. */
759 static void
rs6000_parse_abi_options()760 rs6000_parse_abi_options ()
761 {
762 if (rs6000_abi_string == 0)
763 return;
764 else if (! strcmp (rs6000_abi_string, "altivec"))
765 rs6000_altivec_abi = 1;
766 else if (! strcmp (rs6000_abi_string, "no-altivec"))
767 rs6000_altivec_abi = 0;
768 else if (! strcmp (rs6000_abi_string, "spe"))
769 rs6000_spe_abi = 1;
770 else if (! strcmp (rs6000_abi_string, "no-spe"))
771 rs6000_spe_abi = 0;
772 else
773 error ("unknown ABI specified: '%s'", rs6000_abi_string);
774 }
775
/* OPTIMIZATION_OPTIONS hook: rs6000 has no per-optimization-level
   flag adjustments, so this is deliberately empty.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
782
783 /* Do anything needed at the start of the asm file. */
784
void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  /* START points at the banner until something is printed, then at ""
     so the banner is emitted at most once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Echo every cpu/tune selection that was in effect.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      /* Report the small-data model and -G threshold on ELF targets.  */
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the comment line, but only if anything was printed.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
830
831 /* Return nonzero if this function is known to have a null epilogue. */
832
833 int
direct_return()834 direct_return ()
835 {
836 if (reload_completed)
837 {
838 rs6000_stack_t *info = rs6000_stack_info ();
839
840 if (info->first_gp_reg_save == 32
841 && info->first_fp_reg_save == 64
842 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
843 && ! info->lr_save_p
844 && ! info->cr_save_p
845 && info->vrsave_mask == 0
846 && ! info->push_p)
847 return 1;
848 }
849
850 return 0;
851 }
852
853 /* Returns 1 always. */
854
/* Predicate that accepts every operand unconditionally.  */
int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
862
863 /* Returns 1 if op is the count register. */
864 int
count_register_operand(op,mode)865 count_register_operand (op, mode)
866 rtx op;
867 enum machine_mode mode ATTRIBUTE_UNUSED;
868 {
869 if (GET_CODE (op) != REG)
870 return 0;
871
872 if (REGNO (op) == COUNT_REGISTER_REGNUM)
873 return 1;
874
875 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
876 return 1;
877
878 return 0;
879 }
880
881 /* Returns 1 if op is an altivec register. */
882 int
altivec_register_operand(op,mode)883 altivec_register_operand (op, mode)
884 rtx op;
885 enum machine_mode mode ATTRIBUTE_UNUSED;
886 {
887
888 return (register_operand (op, mode)
889 && (GET_CODE (op) != REG
890 || REGNO (op) > FIRST_PSEUDO_REGISTER
891 || ALTIVEC_REGNO_P (REGNO (op))));
892 }
893
894 int
xer_operand(op,mode)895 xer_operand (op, mode)
896 rtx op;
897 enum machine_mode mode ATTRIBUTE_UNUSED;
898 {
899 if (GET_CODE (op) != REG)
900 return 0;
901
902 if (XER_REGNO_P (REGNO (op)))
903 return 1;
904
905 return 0;
906 }
907
908 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
909 by such constants completes more quickly. */
910
911 int
s8bit_cint_operand(op,mode)912 s8bit_cint_operand (op, mode)
913 rtx op;
914 enum machine_mode mode ATTRIBUTE_UNUSED;
915 {
916 return ( GET_CODE (op) == CONST_INT
917 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
918 }
919
920 /* Return 1 if OP is a constant that can fit in a D field. */
921
922 int
short_cint_operand(op,mode)923 short_cint_operand (op, mode)
924 rtx op;
925 enum machine_mode mode ATTRIBUTE_UNUSED;
926 {
927 return (GET_CODE (op) == CONST_INT
928 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
929 }
930
931 /* Similar for an unsigned D field. */
932
933 int
u_short_cint_operand(op,mode)934 u_short_cint_operand (op, mode)
935 rtx op;
936 enum machine_mode mode ATTRIBUTE_UNUSED;
937 {
938 return (GET_CODE (op) == CONST_INT
939 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
940 }
941
942 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
943
944 int
non_short_cint_operand(op,mode)945 non_short_cint_operand (op, mode)
946 rtx op;
947 enum machine_mode mode ATTRIBUTE_UNUSED;
948 {
949 return (GET_CODE (op) == CONST_INT
950 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
951 }
952
953 /* Returns 1 if OP is a CONST_INT that is a positive value
954 and an exact power of 2. */
955
956 int
exact_log2_cint_operand(op,mode)957 exact_log2_cint_operand (op, mode)
958 rtx op;
959 enum machine_mode mode ATTRIBUTE_UNUSED;
960 {
961 return (GET_CODE (op) == CONST_INT
962 && INTVAL (op) > 0
963 && exact_log2 (INTVAL (op)) >= 0);
964 }
965
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  Pseudo registers are accepted.  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept anything register_operand accepts except the special hard
     registers.  NOTE(review): the arithmetic below relies on the
     target's fixed register numbering -- GPRs below MQ_REGNO, the
     special registers (MQ, LR, CTR, CRs, XER) between MQ_REGNO and
     ARG_POINTER_REGNUM, everything at or above ARG_POINTER_REGNUM
     being ordinary except XER -- confirm against the register layout
     in rs6000.h.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
980
981 /* Returns 1 if OP is either a pseudo-register or a register denoting a
982 CR field. */
983
984 int
cc_reg_operand(op,mode)985 cc_reg_operand (op, mode)
986 rtx op;
987 enum machine_mode mode;
988 {
989 return (register_operand (op, mode)
990 && (GET_CODE (op) != REG
991 || REGNO (op) >= FIRST_PSEUDO_REGISTER
992 || CR_REGNO_P (REGNO (op))));
993 }
994
995 /* Returns 1 if OP is either a pseudo-register or a register denoting a
996 CR field that isn't CR0. */
997
998 int
cc_reg_not_cr0_operand(op,mode)999 cc_reg_not_cr0_operand (op, mode)
1000 rtx op;
1001 enum machine_mode mode;
1002 {
1003 return (register_operand (op, mode)
1004 && (GET_CODE (op) != REG
1005 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1006 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1007 }
1008
1009 /* Returns 1 if OP is either a constant integer valid for a D-field or
1010 a non-special register. If a register, it must be in the proper
1011 mode unless MODE is VOIDmode. */
1012
1013 int
reg_or_short_operand(op,mode)1014 reg_or_short_operand (op, mode)
1015 rtx op;
1016 enum machine_mode mode;
1017 {
1018 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1019 }
1020
1021 /* Similar, except check if the negation of the constant would be
1022 valid for a D-field. */
1023
1024 int
reg_or_neg_short_operand(op,mode)1025 reg_or_neg_short_operand (op, mode)
1026 rtx op;
1027 enum machine_mode mode;
1028 {
1029 if (GET_CODE (op) == CONST_INT)
1030 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1031
1032 return gpc_reg_operand (op, mode);
1033 }
1034
1035 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1036 a non-special register. If a register, it must be in the proper
1037 mode unless MODE is VOIDmode. */
1038
1039 int
reg_or_aligned_short_operand(op,mode)1040 reg_or_aligned_short_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode;
1043 {
1044 if (gpc_reg_operand (op, mode))
1045 return 1;
1046 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1047 return 1;
1048
1049 return 0;
1050 }
1051
1052
1053 /* Return 1 if the operand is either a register or an integer whose
1054 high-order 16 bits are zero. */
1055
1056 int
reg_or_u_short_operand(op,mode)1057 reg_or_u_short_operand (op, mode)
1058 rtx op;
1059 enum machine_mode mode;
1060 {
1061 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1062 }
1063
1064 /* Return 1 is the operand is either a non-special register or ANY
1065 constant integer. */
1066
1067 int
reg_or_cint_operand(op,mode)1068 reg_or_cint_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode;
1071 {
1072 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1073 }
1074
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a 64-bit host, require the value to fit in 32
		 signed bits: biasing by 0x80000000 maps the valid
		 range onto [0, 2^32).  On a 32-bit host every
		 CONST_INT trivially fits, so no check is needed.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1091
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition (i.e. one that can
   be split into an addis/addi pair without wrapping).  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* 32-bit host: values are inherently 32-bit; just rule
		 out the top range where the addi sign-extension of the
		 low half cannot be compensated by addis.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* 64-bit host: bias by 0x80008000 so the representable
		 range [-0x80000000, 0x7fff7fff] maps onto [0, 2^32).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1110
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction (i.e. whose
   negation is valid for 64-bit addition; see
   reg_or_add_cint64_operand).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* NOTE(review): - INTVAL (op) overflows for the most negative
     HOST_WIDE_INT; presumably such values never reach here in
     practice -- worth confirming.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1129
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT cannot represent more than
	     HOST_BITS_PER_WIDE_INT bits, so the mode here must be
	     wider than 32 bits or something is inconsistent.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends beyond the host word,
	     so it is not a 32-bit unsigned value in a wider mode.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Require all bits above the low 32 (within MODE) to be zero.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only appear when the value does not fit
	 a host wide int; that only makes sense for DImode here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* The value fits in 32 unsigned bits iff the high word is 0.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1163
1164 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1165
1166 int
got_operand(op,mode)1167 got_operand (op, mode)
1168 rtx op;
1169 enum machine_mode mode ATTRIBUTE_UNUSED;
1170 {
1171 return (GET_CODE (op) == SYMBOL_REF
1172 || GET_CODE (op) == CONST
1173 || GET_CODE (op) == LABEL_REF);
1174 }
1175
1176 /* Return 1 if the operand is a simple references that can be loaded via
1177 the GOT (labels involving addition aren't allowed). */
1178
1179 int
got_no_const_operand(op,mode)1180 got_no_const_operand (op, mode)
1181 rtx op;
1182 enum machine_mode mode ATTRIBUTE_UNUSED;
1183 {
1184 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1185 }
1186
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32 bits and the
	 remaining high part, then count recursively; the extra insn
	 is the rldimi/shift that combines the halves.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Values that sign-extend from 32 bits take an lis/ori pair.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* Anything else needs an addis/ori (lis/ori) pair.  */
  else
    return 2;
}
1224
/* Return the number of instructions it takes to load constant OP of
   mode MODE into an integer register.  OP must be a CONST_INT or a
   CONST_DOUBLE (integer or floating); anything else aborts.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is a valid rldicl/rldicr mask can be
	 formed in two insns (li/rotate), even though splitting it
	 into halves would cost more.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Cost of loading the 32-bit target image of the float.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* VOIDmode/DImode CONST_DOUBLEs carry an integer directly;
	 otherwise convert the float to its two-word target image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit registers: each half is loaded independently.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1299
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      /* Long double: easy only if each of the four 32-bit words
	 loads in a single instruction.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      /* Double: both 32-bit words must load in one instruction each.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* Single: the one 32-bit image must load in one instruction.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* DImode CONST_DOUBLE: easy if the low word is zero on 64-bit
       targets, or if it loads in at most two instructions.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1377
1378 /* Return 1 if the operand is a CONST_INT and can be put into a
1379 register with one instruction. */
1380
1381 static int
easy_vector_constant(op)1382 easy_vector_constant (op)
1383 rtx op;
1384 {
1385 rtx elt;
1386 int units, i;
1387
1388 if (GET_CODE (op) != CONST_VECTOR)
1389 return 0;
1390
1391 units = CONST_VECTOR_NUNITS (op);
1392
1393 /* We can generate 0 easily. Look for that. */
1394 for (i = 0; i < units; ++i)
1395 {
1396 elt = CONST_VECTOR_ELT (op, i);
1397
1398 /* We could probably simplify this by just checking for equality
1399 with CONST0_RTX for the current mode, but let's be safe
1400 instead. */
1401
1402 switch (GET_CODE (elt))
1403 {
1404 case CONST_INT:
1405 if (INTVAL (elt) != 0)
1406 return 0;
1407 break;
1408 case CONST_DOUBLE:
1409 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1410 return 0;
1411 break;
1412 default:
1413 return 0;
1414 }
1415 }
1416
1417 /* We could probably generate a few other constants trivially, but
1418 gcc doesn't generate them yet. FIXME later. */
1419 return 1;
1420 }
1421
1422 /* Return 1 if the operand is the constant 0. This works for scalars
1423 as well as vectors. */
1424 int
zero_constant(op,mode)1425 zero_constant (op, mode)
1426 rtx op;
1427 enum machine_mode mode;
1428 {
1429 return op == CONST0_RTX (mode);
1430 }
1431
1432 /* Return 1 if the operand is 0.0. */
1433 int
zero_fp_constant(op,mode)1434 zero_fp_constant (op, mode)
1435 rtx op;
1436 enum machine_mode mode;
1437 {
1438 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1439 }
1440
1441 /* Return 1 if the operand is in volatile memory. Note that during
1442 the RTL generation phase, memory_operand does not return TRUE for
1443 volatile memory references. So this function allows us to
1444 recognize volatile references where its safe. */
1445
1446 int
volatile_mem_operand(op,mode)1447 volatile_mem_operand (op, mode)
1448 rtx op;
1449 enum machine_mode mode;
1450 {
1451 if (GET_CODE (op) != MEM)
1452 return 0;
1453
1454 if (!MEM_VOLATILE_P (op))
1455 return 0;
1456
1457 if (mode != GET_MODE (op))
1458 return 0;
1459
1460 if (reload_completed)
1461 return memory_operand (op, mode);
1462
1463 if (reload_in_progress)
1464 return strict_memory_address_p (mode, XEXP (op, 0));
1465
1466 return memory_address_p (mode, XEXP (op, 0));
1467 }
1468
1469 /* Return 1 if the operand is an offsettable memory operand. */
1470
1471 int
offsettable_mem_operand(op,mode)1472 offsettable_mem_operand (op, mode)
1473 rtx op;
1474 enum machine_mode mode;
1475 {
1476 return ((GET_CODE (op) == MEM)
1477 && offsettable_address_p (reload_completed || reload_in_progress,
1478 mode, XEXP (op, 0)));
1479 }
1480
1481 /* Return 1 if the operand is either an easy FP constant (see above) or
1482 memory. */
1483
1484 int
mem_or_easy_const_operand(op,mode)1485 mem_or_easy_const_operand (op, mode)
1486 rtx op;
1487 enum machine_mode mode;
1488 {
1489 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1490 }
1491
1492 /* Return 1 if the operand is either a non-special register or an item
1493 that can be used as the operand of a `mode' add insn. */
1494
1495 int
add_operand(op,mode)1496 add_operand (op, mode)
1497 rtx op;
1498 enum machine_mode mode;
1499 {
1500 if (GET_CODE (op) == CONST_INT)
1501 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1502 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1503
1504 return gpc_reg_operand (op, mode);
1505 }
1506
1507 /* Return 1 if OP is a constant but not a valid add_operand. */
1508
1509 int
non_add_cint_operand(op,mode)1510 non_add_cint_operand (op, mode)
1511 rtx op;
1512 enum machine_mode mode ATTRIBUTE_UNUSED;
1513 {
1514 return (GET_CODE (op) == CONST_INT
1515 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1516 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1517 }
1518
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000,
   i.e. a value whose set bits fit entirely in either the low or the
   high 16 bits (ori/oris, xori/xoris).  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative value in a wider mode implies
	 set bits above bit 31, which cannot be reached by
	 ori/oris/xori/xoris.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only occur when the value needs more
	 bits than a host wide int provides.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* All set bits in the low 16, or all in the next 16.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1557
1558 /* Return 1 if C is a constant that is not a logical operand (as
1559 above), but could be split into one. */
1560
1561 int
non_logical_cint_operand(op,mode)1562 non_logical_cint_operand (op, mode)
1563 rtx op;
1564 enum machine_mode mode;
1565 {
1566 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1567 && ! logical_operand (op, mode)
1568 && reg_or_logical_cint_operand (op, mode));
1569 }
1570
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;		/* isolates the lowest set bit */

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition: clear everything below (and including
     none of) the bit found above.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero), i.e. there is
     no third transition.  */
  return c == -lsb;
}
1617
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask that
   wraps around (bit 31 and bit 0 both set), which mask_operand above
   deliberately rejects in 64-bit mode.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only the wrap-around case: both the MSB and LSB of the 32-bit
     value must be set.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Same transition-counting trick as mask_operand: the LS bit is
     known set, so invert; reject all ones (c == 0 after invert).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find and erase the first transition, then check there is at most
     one more (all bits above the second transition must be 1's).  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1645
/* Return 1 if the operand is a constant that is a PowerPC64 mask
   (usable with rldicl/rldicr).  It is if there is no more than one
   1->0 or 0->1 transition.  Reject all zeros, since zero should have
   been optimized away and confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's:
	 c & -c isolates the lowest set bit, and c == -lsb holds
	 exactly when c is a contiguous run of 1's down to that bit.  */
      lsb = c & -c;
      return c == -lsb;
    }
  return 0;
}
1677
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition (c & -c isolates the lowest set
	 bit).  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero), i.e.
	 there is no fourth transition.  */
      return c == -lsb;
    }
  return 0;
}
1728
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT accepted by
   mask64_2_operand).  On return, OUT[0]/OUT[2] are the two rotate
   counts and OUT[1]/OUT[3] the corresponding masks, such that
   rotate-by-OUT[0], AND OUT[1], rotate-by-OUT[2], AND OUT[3]
   computes x & IN.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* Unreachable: mask64_2_operand never matches on a 32-bit host.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
1800
1801 /* Return 1 if the operand is either a non-special register or a constant
1802 that can be used as the operand of a PowerPC64 logical AND insn. */
1803
1804 int
and64_operand(op,mode)1805 and64_operand (op, mode)
1806 rtx op;
1807 enum machine_mode mode;
1808 {
1809 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1810 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1811
1812 return (logical_operand (op, mode) || mask64_operand (op, mode));
1813 }
1814
1815 /* Like the above, but also match constants that can be implemented
1816 with two rldicl or rldicr insns. */
1817
1818 int
and64_2_operand(op,mode)1819 and64_2_operand (op, mode)
1820 rtx op;
1821 enum machine_mode mode;
1822 {
1823 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1824 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1825
1826 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1827 }
1828
1829 /* Return 1 if the operand is either a non-special register or a
1830 constant that can be used as the operand of an RS/6000 logical AND insn. */
1831
1832 int
and_operand(op,mode)1833 and_operand (op, mode)
1834 rtx op;
1835 enum machine_mode mode;
1836 {
1837 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1838 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1839
1840 return (logical_operand (op, mode) || mask_operand (op, mode));
1841 }
1842
1843 /* Return 1 if the operand is a general register or memory operand. */
1844
1845 int
reg_or_mem_operand(op,mode)1846 reg_or_mem_operand (op, mode)
1847 rtx op;
1848 enum machine_mode mode;
1849 {
1850 return (gpc_reg_operand (op, mode)
1851 || memory_operand (op, mode)
1852 || volatile_mem_operand (op, mode));
1853 }
1854
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG at the underlying object.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa is a DS-form instruction: it has no update
     (pre-increment/decrement) form, and any constant displacement
     must be a multiple of 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1877
1878 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1879
1880 int
symbol_ref_operand(op,mode)1881 symbol_ref_operand (op, mode)
1882 rtx op;
1883 enum machine_mode mode;
1884 {
1885 if (mode != VOIDmode && GET_MODE (op) != mode)
1886 return 0;
1887
1888 return (GET_CODE (op) == SYMBOL_REF);
1889 }
1890
1891 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1892 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1893
1894 int
call_operand(op,mode)1895 call_operand (op, mode)
1896 rtx op;
1897 enum machine_mode mode;
1898 {
1899 if (mode != VOIDmode && GET_MODE (op) != mode)
1900 return 0;
1901
1902 return (GET_CODE (op) == SYMBOL_REF
1903 || (GET_CODE (op) == REG
1904 && (REGNO (op) == LINK_REGISTER_REGNUM
1905 || REGNO (op) == COUNT_REGISTER_REGNUM
1906 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1907 }
1908
1909 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1910 this file and the function is not weakly defined. */
1911
1912 int
current_file_function_operand(op,mode)1913 current_file_function_operand (op, mode)
1914 rtx op;
1915 enum machine_mode mode ATTRIBUTE_UNUSED;
1916 {
1917 return (GET_CODE (op) == SYMBOL_REF
1918 && (SYMBOL_REF_FLAG (op)
1919 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1920 && ! DECL_WEAK (current_function_decl))));
1921 }
1922
/* Return 1 if this operand is a valid input for a move insn of mode
   MODE.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Everything else (e.g. an arbitrary SYMBOL_REF) must first be
     loaded into a register.  */
  return 0;
}
1979
/* Return 1 for an operand in small memory on V.4/eabi: a SYMBOL_REF
   (possibly plus a CONST_INT offset) whose referenced address lies
   within the -G limit of _SDA_BASE_.  Always 0 on non-ELF targets.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* -msdata=none and -msdata=data do not use register-relative small
     data addressing, so nothing qualifies.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols are tagged with a leading '@' in their name.
     NOTE(review): presumably set by the ENCODE_SECTION_INFO machinery
     elsewhere in this backend -- the tagging itself is not visible
     here.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2028
2029 static int
constant_pool_expr_1(op,have_sym,have_toc)2030 constant_pool_expr_1 (op, have_sym, have_toc)
2031 rtx op;
2032 int *have_sym;
2033 int *have_toc;
2034 {
2035 switch (GET_CODE(op))
2036 {
2037 case SYMBOL_REF:
2038 if (CONSTANT_POOL_ADDRESS_P (op))
2039 {
2040 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2041 {
2042 *have_sym = 1;
2043 return 1;
2044 }
2045 else
2046 return 0;
2047 }
2048 else if (! strcmp (XSTR (op, 0), toc_label_name))
2049 {
2050 *have_toc = 1;
2051 return 1;
2052 }
2053 else
2054 return 0;
2055 case PLUS:
2056 case MINUS:
2057 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2058 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2059 case CONST:
2060 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2061 case CONST_INT:
2062 return 1;
2063 default:
2064 return 0;
2065 }
2066 }
2067
2068 int
constant_pool_expr_p(op)2069 constant_pool_expr_p (op)
2070 rtx op;
2071 {
2072 int have_sym = 0;
2073 int have_toc = 0;
2074 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2075 }
2076
2077 int
toc_relative_expr_p(op)2078 toc_relative_expr_p (op)
2079 rtx op;
2080 {
2081 int have_sym = 0;
2082 int have_toc = 0;
2083 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2084 }
2085
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + constant that does not fit in a signed 16-bit displacement:
     split it into a high part added to the register and a sign-extended
     low 16 bits left as the displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; high_int + low_int reconstructs
	 the original constant exactly.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register to form an
     indexed address, but only for modes where indexed addressing is
     usable (single-part values; DF/TF need hard float or 64-bit, DI
     needs 64-bit, TI never).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec addresses: reg+reg only, so force everything into
     registers.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Keep a small constant offset in place; force anything else
	     into a register to get [reg + reg].  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC and without PIC: materialize a symbolic
     constant with an elf_high/lo_sum pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Darwin (Mach-O) without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* A reference to a constant-pool entry that lives in the TOC becomes
     a TOC-relative reference.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing we can do; the caller falls back to generic code.  */
    return NULL_RTX;
}
2207
2208 /* The convention appears to be to define this wherever it is used.
2209 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2210 is now used here. */
2211 #ifndef REG_MODE_OK_FOR_BASE_P
2212 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2213 #endif
2214
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (plus (plus reg high) low) from a previous call: reload the
	 inner sum into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Base register plus a displacement too large for one insn: split
     the displacement into a high part (reloaded into the base) and a
     signed-16-bit low part left in the mem.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
			    gen_rtx (MINUS, Pmode, x,
				     gen_rtx (SYMBOL_REF, Pmode,
					      machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
			    gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool entries placed in the TOC need no reload at all;
     just rewrite to a TOC reference.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2338
2339 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2340 that is a valid memory address for an instruction.
2341 The MODE argument is the machine mode for the MEM expression
2342 that wants to use this address.
2343
2344 On the RS/6000, there are four valid address: a SYMBOL_REF that
2345 refers to a constant pool entry of an address (or the sum of it
2346 plus a constant), a short (16-bit signed) constant plus a register,
2347 the sum of two registers, or a register indirect, possibly with an
2348 auto-increment. For DFmode and DImode with an constant plus register,
2349 we must ensure that both words are addressable or PowerPC64 with offset
2350 word aligned.
2351
2352 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2353 32-bit DImode, TImode), indexed addressing cannot be used because
2354 adjacent memory cells are accessed by adding word-sized offsets
2355 during assembly output. */
2356 int
rs6000_legitimate_address(mode,x,reg_ok_strict)2357 rs6000_legitimate_address (mode, x, reg_ok_strict)
2358 enum machine_mode mode;
2359 rtx x;
2360 int reg_ok_strict;
2361 {
2362 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2363 return 1;
2364 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2365 && !ALTIVEC_VECTOR_MODE (mode)
2366 && !SPE_VECTOR_MODE (mode)
2367 && TARGET_UPDATE
2368 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2369 return 1;
2370 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2371 return 1;
2372 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2373 return 1;
2374 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2375 if (! reg_ok_strict
2376 && GET_CODE (x) == PLUS
2377 && GET_CODE (XEXP (x, 0)) == REG
2378 && XEXP (x, 0) == virtual_stack_vars_rtx
2379 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2380 return 1;
2381 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2382 return 1;
2383 if (mode != TImode
2384 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2385 || TARGET_POWERPC64
2386 || (mode != DFmode && mode != TFmode))
2387 && (TARGET_POWERPC64 || mode != DImode)
2388 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2389 return 1;
2390 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2391 return 1;
2392 return 0;
2393 }
2394
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QImode/HImode constants always fit in a single move.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Build the value in two insns: set the high 16 bits, then IOR
	 in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low half C0 and high half C1, then hand off
	 to rs6000_emit_set_long_const.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high half.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* A 64-bit HOST_WIDE_INT holds the whole value in C0.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the full constant as a REG_EQUAL note on the last insn so
     later passes know what the multi-insn sequence computes.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2461
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low word of the constant, C2 the high word (with a 64-bit
   HOST_WIDE_INT, C1 holds the whole value and C2 is recomputed below).
   Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit: a DImode value lives in a register pair; just move
	 each word separately.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit: decompose into four 16-bit chunks, ud1 lowest.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value fits in a sign-extended 16-bit immediate: one li.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value fits in a sign-extended 32-bit immediate: lis + ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* 48-bit value: build the upper 32 bits, shift left 16, then OR
	 in the lowest chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* Full 64-bit value: build the upper 32 bits, shift left 32,
	     then OR in the two low chunks.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2550
/* Emit a move from SOURCE to DEST in mode MODE.

   This is the expander behind the mov<mode> patterns: it massages
   SOURCE (forcing constants to memory or the TOC, fixing up invalid
   addresses) and then emits the SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A CONST_DOUBLE whose value would fit in a CONST_INT should have
     been one; abort on that inconsistency.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Split the DImode mem-to-mem copy into two SImode copies, which
	 avoid the slow unaligned DImode access.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Load a sub-word value zero-extended to a full word and take
	 the lowpart; presumably this produces better code than a
	 narrow load -- TODO(review): confirm intent.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Avoid mem-to-mem moves: force the source into a register when
	 the destination is not one.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* Handle the case of CONSTANT_P_RTX.  */
  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
    goto emit_set;

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants (e.g. symbolic addresses) must come
	 from the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be loaded cheaply go to memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address from the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC, no PIC: materialize the constant with a high/low
	 instruction pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pool entry landed in the TOC, reference it through
	     the TOC register and mark the memory read-only.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
			get_pool_constant (XEXP (operands[1], 0)),
			get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands need a simple register address.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      if (TARGET_POWER)
	{
	  /* The POWER TImode move pattern clobbers a scratch SImode
	     register.  */
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
		       gen_rtvec (2,
				  gen_rtx_SET (VOIDmode,
					       operands[0], operands[1]),
				  gen_rtx_CLOBBER (VOIDmode,
						   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
2873
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   INCOMING is nonzero when laying out a function's own incoming
   arguments; LIBCALL is nonzero for a library call.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (cum, fntype, libname, incoming, libcall)
     CUMULATIVE_ARGS *cum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int incoming;
     int libcall;
{
  static CUMULATIVE_ARGS zero_cumulative;

  /* Start from all-zero state, then fill in the register cursors.  */
  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
		      ? CALL_LIBCALL : CALL_NORMAL);
  cum->sysv_gregno = GP_ARG_MIN_REG;

  if (incoming)
    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */

  else if (cum->prototype)
    /* Count the prototype args, plus one if the value is returned in
       memory (the hidden return-address argument).  */
    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));

  else
    cum->nargs_prototype = 0;

  cum->orig_nargs = cum->nargs_prototype;

  /* Check for a longcall attribute.  */
  if ((!fntype && rs6000_default_long_calls)
      || (fntype
	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
	  && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
    cum->call_cookie |= CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " ret code = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (cum->call_cookie & CALL_LONG)
	fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
	       cum->prototype, cum->nargs_prototype);
    }
}
2937
2938 /* If defined, a C expression which determines whether, and in which
2939 direction, to pad out an argument with extra space. The value
2940 should be of type `enum direction': either `upward' to pad above
2941 the argument, `downward' to pad below, or `none' to inhibit
2942 padding.
2943
2944 For the AIX ABI structs are always stored left shifted in their
2945 argument slot. */
2946
2947 enum direction
function_arg_padding(mode,type)2948 function_arg_padding (mode, type)
2949 enum machine_mode mode;
2950 tree type;
2951 {
2952 if (type != 0 && AGGREGATE_TYPE_P (type))
2953 return upward;
2954
2955 /* This is the default definition. */
2956 return (! BYTES_BIG_ENDIAN
2957 ? upward
2958 : ((mode == BLKmode
2959 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2960 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2961 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2962 ? downward : upward));
2963 }
2964
2965 /* If defined, a C expression that gives the alignment boundary, in bits,
2966 of an argument with the specified mode and type. If it is not defined,
2967 PARM_BOUNDARY is used for all arguments.
2968
2969 V.4 wants long longs to be double word aligned. */
2970
2971 int
function_arg_boundary(mode,type)2972 function_arg_boundary (mode, type)
2973 enum machine_mode mode;
2974 tree type ATTRIBUTE_UNUSED;
2975 {
2976 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2977 return 64;
2978 else if (SPE_VECTOR_MODE (mode))
2979 return 64;
2980 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2981 return 128;
2982 else
2983 return PARM_BOUNDARY;
2984 }
2985
2986 /* Update the data in CUM to advance over an argument
2987 of mode MODE and data type TYPE.
2988 (TYPE is null for libcalls where that information may not be available.) */
2989
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One fewer argument covered by the prototype; once this goes
     negative we are past the prototyped parameters.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args consume vector registers while any remain (and
	 the prototype still covers them); otherwise they take stack
	 words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector occupies a single GP register slot.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FPRs exhausted: doubles spilled to the stack are
		 double-word aligned, so burn a pad word if needed.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* Other ABIs (AIX-style): arguments occupy consecutive words;
	 a doubleword-aligned arg may first burn a pad word when we
	 are at an odd word on a 32-bit target.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args advance the FP register counter in parallel with the
	 word counter; TFmode uses two FPRs.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3086
3087 /* Determine where to put an argument to a function.
3088 Value is zero to push the argument on the stack,
3089 or a hard register in which to store the argument.
3090
3091 MODE is the argument's machine mode.
3092 TYPE is the data type of the argument (as a tree).
3093 This is null for libcalls where that information may
3094 not be available.
3095 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3096 the preceding args and about the function being called.
3097 NAMED is nonzero if this argument is a named parameter
3098 (otherwise it is an extra parameter matching an ellipsis).
3099
3100 On RS/6000 the first eight words of non-FP are normally in registers
3101 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3102 Under V.4, the first 8 FP args are in registers.
3103
3104 If this is floating-point and no prototype is specified, we use
3105 both an FP and integer register (or possibly FP reg and stack). Library
3106 functions (when CALL_LIBCALL is set) always have the proper types for args,
3107 so we can pass the FP value just in one register. emit_library_function
3108 doesn't support PARALLEL anyway. */
3109
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* Named AltiVec args go in vector registers while any remain;
     otherwise (or for unnamed ones) the caller pushes them.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  /* Describe the two halves as a PARALLEL of two
		     SImode EXPR_LISTs at byte offsets 0 and 4.  */
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX-style ABIs: arguments occupy consecutive words; burn a
	 pad word first when a 64-bit-aligned arg lands at an odd
	 word on a 32-bit target.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types go entirely in memory.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: pass it in the FP register and also
	     in the corresponding GP register(s) (or only the portion
	     that fits, if it spills to the stack).  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3253
3254 /* For an arg passed partly in registers and partly in memory,
3255 this is the number of registers used.
3256 For args passed entirely in registers or entirely in memory, zero. */
3257
3258 int
function_arg_partial_nregs(cum,mode,type,named)3259 function_arg_partial_nregs (cum, mode, type, named)
3260 CUMULATIVE_ARGS *cum;
3261 enum machine_mode mode;
3262 tree type;
3263 int named ATTRIBUTE_UNUSED;
3264 {
3265 if (DEFAULT_ABI == ABI_V4)
3266 return 0;
3267
3268 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3269 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3270 {
3271 if (cum->nargs_prototype >= 0)
3272 return 0;
3273 }
3274
3275 if (cum->words < GP_ARG_NUM_REG
3276 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3277 {
3278 int ret = GP_ARG_NUM_REG - cum->words;
3279 if (ret && TARGET_DEBUG_ARG)
3280 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3281
3282 return ret;
3283 }
3284
3285 return 0;
3286 }
3287
3288 /* A C expression that indicates when an argument must be passed by
3289 reference. If nonzero for an argument, a copy of that argument is
3290 made in memory and a pointer to the argument is passed instead of
3291 the argument itself. The pointer is passed in whatever way is
3292 appropriate for passing a pointer to that type.
3293
3294 Under V.4, structures and unions are passed by reference.
3295
3296 As an extension to all ABIs, variable sized types are passed by
3297 reference. */
3298
3299 int
function_arg_pass_by_reference(cum,mode,type,named)3300 function_arg_pass_by_reference (cum, mode, type, named)
3301 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3302 enum machine_mode mode ATTRIBUTE_UNUSED;
3303 tree type;
3304 int named ATTRIBUTE_UNUSED;
3305 {
3306 if (DEFAULT_ABI == ABI_V4
3307 && ((type && AGGREGATE_TYPE_P (type))
3308 || mode == TFmode))
3309 {
3310 if (TARGET_DEBUG_ARG)
3311 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3312
3313 return 1;
3314 }
3315 return type && int_size_in_bytes (type) < 0;
3316 }
3317
3318 /* Perform any needed actions needed for a function that is receiving a
3319 variable number of arguments.
3320
3321 CUM is as above.
3322
3323 MODE and TYPE are the mode and type of the current parameter.
3324
3325 PRETEND_SIZE is a variable that should be set to the amount of stack
3326 that must be pushed by the prolog to pretend that our caller pushed
3327 it.
3328
3329 Normally, this macro will push all remaining incoming registers on the
3330 stack and set PRETEND_SIZE to the length of the registers pushed. */
3331
3332 void
setup_incoming_varargs(cum,mode,type,pretend_size,no_rtl)3333 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3334 CUMULATIVE_ARGS *cum;
3335 enum machine_mode mode;
3336 tree type;
3337 int *pretend_size;
3338 int no_rtl;
3339
3340 {
3341 CUMULATIVE_ARGS next_cum;
3342 int reg_size = TARGET_32BIT ? 4 : 8;
3343 rtx save_area = NULL_RTX, mem;
3344 int first_reg_offset, set;
3345 tree fntype;
3346 int stdarg_p;
3347
3348 fntype = TREE_TYPE (current_function_decl);
3349 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3350 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3351 != void_type_node));
3352
3353 /* For varargs, we do not want to skip the dummy va_dcl argument.
3354 For stdargs, we do want to skip the last named argument. */
3355 next_cum = *cum;
3356 if (stdarg_p)
3357 function_arg_advance (&next_cum, mode, type, 1);
3358
3359 if (DEFAULT_ABI == ABI_V4)
3360 {
3361 /* Indicate to allocate space on the stack for varargs save area. */
3362 cfun->machine->sysv_varargs_p = 1;
3363 if (! no_rtl)
3364 save_area = plus_constant (virtual_stack_vars_rtx,
3365 - RS6000_VARARGS_SIZE);
3366
3367 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3368 }
3369 else
3370 {
3371 first_reg_offset = next_cum.words;
3372 save_area = virtual_incoming_args_rtx;
3373 cfun->machine->sysv_varargs_p = 0;
3374
3375 if (MUST_PASS_IN_STACK (mode, type))
3376 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3377 }
3378
3379 set = get_varargs_alias_set ();
3380 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3381 {
3382 mem = gen_rtx_MEM (BLKmode,
3383 plus_constant (save_area,
3384 first_reg_offset * reg_size)),
3385 set_mem_alias_set (mem, set);
3386 set_mem_align (mem, BITS_PER_WORD);
3387
3388 move_block_from_reg
3389 (GP_ARG_MIN_REG + first_reg_offset, mem,
3390 GP_ARG_NUM_REG - first_reg_offset,
3391 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3392
3393 /* ??? Does ABI_V4 need this at all? */
3394 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3395 }
3396
3397 /* Save FP registers if needed. */
3398 if (DEFAULT_ABI == ABI_V4
3399 && TARGET_HARD_FLOAT && TARGET_FPRS
3400 && ! no_rtl
3401 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3402 {
3403 int fregno = next_cum.fregno;
3404 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3405 rtx lab = gen_label_rtx ();
3406 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3407
3408 emit_jump_insn (gen_rtx_SET (VOIDmode,
3409 pc_rtx,
3410 gen_rtx_IF_THEN_ELSE (VOIDmode,
3411 gen_rtx_NE (VOIDmode, cr1,
3412 const0_rtx),
3413 gen_rtx_LABEL_REF (VOIDmode, lab),
3414 pc_rtx)));
3415
3416 while (fregno <= FP_ARG_V4_MAX_REG)
3417 {
3418 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3419 set_mem_alias_set (mem, set);
3420 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3421 fregno++;
3422 off += 8;
3423 }
3424
3425 emit_label (lab);
3426 }
3427 }
3428
3429 /* Create the va_list data type. */
3430
3431 tree
rs6000_build_va_list()3432 rs6000_build_va_list ()
3433 {
3434 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3435
3436 /* For AIX, prefer 'char *' because that's what the system
3437 header files like. */
3438 if (DEFAULT_ABI != ABI_V4)
3439 return build_pointer_type (char_type_node);
3440
3441 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3442 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3443
3444 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3445 unsigned_char_type_node);
3446 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3447 unsigned_char_type_node);
3448 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3449 ptr_type_node);
3450 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3451 ptr_type_node);
3452
3453 DECL_FIELD_CONTEXT (f_gpr) = record;
3454 DECL_FIELD_CONTEXT (f_fpr) = record;
3455 DECL_FIELD_CONTEXT (f_ovf) = record;
3456 DECL_FIELD_CONTEXT (f_sav) = record;
3457
3458 TREE_CHAIN (record) = type_decl;
3459 TYPE_NAME (record) = type_decl;
3460 TYPE_FIELDS (record) = f_gpr;
3461 TREE_CHAIN (f_gpr) = f_fpr;
3462 TREE_CHAIN (f_fpr) = f_ovf;
3463 TREE_CHAIN (f_ovf) = f_sav;
3464
3465 layout_type (record);
3466
3467 /* The correct type is an array type of one element. */
3468 return build_array_type (record, build_index_type (size_zero_node));
3469 }
3470
3471 /* Implement va_start. */
3472
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* The four va_list fields, in the order rs6000_build_va_list
     chained them: gpr, fpr, overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each field of the pointed-to record.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the gpr counter field.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Initialize the fpr counter field.  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3541
3542 /* Implement va_arg. */
3543
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) < 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference the fetched pointer to reach the actual
	     argument.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: locate the va_list fields in chain order (gpr, fpr,
     overflow_arg_area, reg_save_area).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter to consult (REG),
     how many registers it takes (N_REG), the offset (SAV_OFS) and
     per-register stride (SAV_SCALE) into the register save area.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      /* If the register counter is already >= 8 - n_reg + 1, the
	 registers are exhausted; fall through to the overflow area.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the counter up to a multiple of n_reg:
	     reg += reg & (n_reg - 1).  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Post-increment the register counter by n_reg while computing
	 the address: addr = sav + sav_ofs + reg++ * sav_scale.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      /* Skip the overflow-area path.  */
      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: t = (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Bump the overflow pointer past the argument just fetched.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference args, what we computed is the address of the
     pointer; load it to get the argument's real address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3728
3729 /* Builtins. */
3730
/* Register builtin NAME (with function type TYPE and md builtin code
   CODE), but only when the target_flags bits in MASK are enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3737
3738 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3739
/* Each entry pairs an insn pattern (CODE_FOR_*) with the builtin's
   name and ALTIVEC_BUILTIN_* code, gated on MASK_ALTIVEC.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3766
3767 /* DST operations: void foo (void *, const int, const char). */
3768
/* The four data-stream-touch variants (dst/dstt/dstst/dststt), each
   mapped to its insn pattern and builtin code.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3776
3777 /* Simple binary operations: VECc = foo (VECa, VECb). */
3778
3779 static struct builtin_description bdesc_2arg[] =
3780 {
3781 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3782 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3783 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3784 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3791 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3792 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3793 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3794 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3795 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3796 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3797 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3804 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3805 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3806 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3807 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3808 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3809 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3810 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3811 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3812 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3814 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3815 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3816 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3817 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3818 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3819 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3820 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3821 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3822 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3823 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3824 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3825 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3826 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3827 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3828 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3829 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3830 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3831 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3832 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3833 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3834 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3835 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3836 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3837 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3838 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3839 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3840 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3841 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3842 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3843 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3844 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3845 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3846 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3847 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3848 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3849 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3850 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3851 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3852 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3853 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3854 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3855 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3856 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3857 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3858 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3859 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3860 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3861 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3862 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3863 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3864 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3867 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3868 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3869 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3870 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3871 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3872 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3873 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3874 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3875 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3876 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3877 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3878 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3879 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3880 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3881 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3882 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3883 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3884 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3885 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3886 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3887 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3888 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3889 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3890 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3891 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3892 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3893 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3894
/* Place-holder.  Leave as first SPE builtin.  */
3896 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3897 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3898 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3899 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3900 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3901 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3902 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3903 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3904 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3905 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3906 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3907 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3908 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3909 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3910 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3911 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3912 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3913 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3914 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3915 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3916 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3917 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3918 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3919 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3920 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3921 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3922 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3923 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3924 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3925 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3926 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3927 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3928 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3929 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3930 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3931 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3932 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3933 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3934 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3935 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3936 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3937 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3938 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3939 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3940 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3941 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3942 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3943 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3944 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3945 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3946 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3947 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3948 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3949 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3950 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3951 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3952 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3953 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3954 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3955 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3956 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3957 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3958 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3959 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3960 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3961 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3962 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3963 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3964 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3965 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3966 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3967 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3968 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3969 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3970 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3971 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3972 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3973 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3974 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3975 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3976 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3977 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3978 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3979 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3980 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3981 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3982 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3983 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3984 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3985 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3986 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3987 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3988 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3989 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3990 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3991 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3992 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3993 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3994 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3995 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3996 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3997 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3998 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3999 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4000 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4001 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4002 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4003 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4004 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4005
4006 /* SPE binary operations expecting a 5-bit unsigned literal. */
4007 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4008
4009 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4010 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4011 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4012 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4013 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4014 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4015 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4016 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4017 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4018 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4019 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4020 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4021 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4022 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4023 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4024 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4025 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4026 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4027 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4028 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4029 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4030 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4031 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4032 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4033 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4034 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4035
4036 /* Place-holder. Leave as last binary SPE builtin. */
4037 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4038 };
4039
4040 /* AltiVec predicates. */
4041
/* Describes one AltiVec predicate builtin: which target flags must be
   enabled for it, the comparison insn pattern used to expand it, the
   assembler opcode string passed through to that pattern, and the
   source-level builtin name with its enum value.  */
struct builtin_description_predicates
{
  /* Target flag bits (MASK_*) that must be set for this builtin.  */
  const unsigned int mask;
  /* Insn pattern implementing the underlying vector comparison.  */
  const enum insn_code icode;
  /* Assembler mnemonic for the record-form compare (the leading '*'
     presumably marks it for the output template — TODO confirm).  */
  const char *opcode;
  /* The __builtin_altivec_* function name exposed to source code.  */
  const char *const name;
  /* Enumeration value identifying this builtin.  */
  const enum rs6000_builtins code;
};
4050
/* Table of AltiVec predicate builtins (__builtin_altivec_vcmp*_p).
   Each entry maps a builtin to the record-form compare pattern for the
   appropriate vector mode and the mnemonic to emit.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4067
4068 /* SPE predicates. */
/* Table of SPE compare/predicate builtins.  NOTE: code elsewhere
   appears to iterate this table by position, so the first and last
   entries are fixed place-holders — keep them in place.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4085
4086 /* SPE evsel predicates. */
/* Table of SPE evsel builtins: each pairs an evsel variant with the
   compare insn that produces its selection condition.  The first and
   last entries are fixed place-holders — keep them in place.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4103
/* ABS* operations.  */
4105
/* Table of AltiVec absolute-value builtins.  The plain abs_* entries
   use the generic absM2 patterns; the abss_* entries use the AltiVec
   saturating variants (expanded via altivec_expand_abs_builtin, which
   supplies two scratch registers).  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4116
4117 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4118 foo (VECa). */
4119
/* Table of one-argument builtins, expanded via
   rs6000_expand_unop_builtin.  The vspltis* and evsplat* entries take
   a 5-bit signed literal rather than a vector operand (the expander
   range-checks that literal).  */
static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4176
4177 static rtx
rs6000_expand_unop_builtin(icode,arglist,target)4178 rs6000_expand_unop_builtin (icode, arglist, target)
4179 enum insn_code icode;
4180 tree arglist;
4181 rtx target;
4182 {
4183 rtx pat;
4184 tree arg0 = TREE_VALUE (arglist);
4185 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4186 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4187 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4188
4189 if (icode == CODE_FOR_nothing)
4190 /* Builtin not supported on this processor. */
4191 return 0;
4192
4193 /* If we got invalid arguments bail out before generating bad rtl. */
4194 if (arg0 == error_mark_node)
4195 return const0_rtx;
4196
4197 if (icode == CODE_FOR_altivec_vspltisb
4198 || icode == CODE_FOR_altivec_vspltish
4199 || icode == CODE_FOR_altivec_vspltisw
4200 || icode == CODE_FOR_spe_evsplatfi
4201 || icode == CODE_FOR_spe_evsplati)
4202 {
4203 /* Only allow 5-bit *signed* literals. */
4204 if (GET_CODE (op0) != CONST_INT
4205 || INTVAL (op0) > 0x1f
4206 || INTVAL (op0) < -0x1f)
4207 {
4208 error ("argument 1 must be a 5-bit signed literal");
4209 return const0_rtx;
4210 }
4211 }
4212
4213 if (target == 0
4214 || GET_MODE (target) != tmode
4215 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4216 target = gen_reg_rtx (tmode);
4217
4218 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4219 op0 = copy_to_mode_reg (mode0, op0);
4220
4221 pat = GEN_FCN (icode) (target, op0);
4222 if (! pat)
4223 return 0;
4224 emit_insn (pat);
4225
4226 return target;
4227 }
4228
4229 static rtx
altivec_expand_abs_builtin(icode,arglist,target)4230 altivec_expand_abs_builtin (icode, arglist, target)
4231 enum insn_code icode;
4232 tree arglist;
4233 rtx target;
4234 {
4235 rtx pat, scratch1, scratch2;
4236 tree arg0 = TREE_VALUE (arglist);
4237 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4238 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4239 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4240
4241 /* If we have invalid arguments, bail out before generating bad rtl. */
4242 if (arg0 == error_mark_node)
4243 return const0_rtx;
4244
4245 if (target == 0
4246 || GET_MODE (target) != tmode
4247 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4248 target = gen_reg_rtx (tmode);
4249
4250 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4251 op0 = copy_to_mode_reg (mode0, op0);
4252
4253 scratch1 = gen_reg_rtx (mode0);
4254 scratch2 = gen_reg_rtx (mode0);
4255
4256 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4257 if (! pat)
4258 return 0;
4259 emit_insn (pat);
4260
4261 return target;
4262 }
4263
4264 static rtx
rs6000_expand_binop_builtin(icode,arglist,target)4265 rs6000_expand_binop_builtin (icode, arglist, target)
4266 enum insn_code icode;
4267 tree arglist;
4268 rtx target;
4269 {
4270 rtx pat;
4271 tree arg0 = TREE_VALUE (arglist);
4272 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4273 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4274 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4275 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4276 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4277 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4278
4279 if (icode == CODE_FOR_nothing)
4280 /* Builtin not supported on this processor. */
4281 return 0;
4282
4283 /* If we got invalid arguments bail out before generating bad rtl. */
4284 if (arg0 == error_mark_node || arg1 == error_mark_node)
4285 return const0_rtx;
4286
4287 if (icode == CODE_FOR_altivec_vcfux
4288 || icode == CODE_FOR_altivec_vcfsx
4289 || icode == CODE_FOR_altivec_vctsxs
4290 || icode == CODE_FOR_altivec_vctuxs
4291 || icode == CODE_FOR_altivec_vspltb
4292 || icode == CODE_FOR_altivec_vsplth
4293 || icode == CODE_FOR_altivec_vspltw
4294 || icode == CODE_FOR_spe_evaddiw
4295 || icode == CODE_FOR_spe_evldd
4296 || icode == CODE_FOR_spe_evldh
4297 || icode == CODE_FOR_spe_evldw
4298 || icode == CODE_FOR_spe_evlhhesplat
4299 || icode == CODE_FOR_spe_evlhhossplat
4300 || icode == CODE_FOR_spe_evlhhousplat
4301 || icode == CODE_FOR_spe_evlwhe
4302 || icode == CODE_FOR_spe_evlwhos
4303 || icode == CODE_FOR_spe_evlwhou
4304 || icode == CODE_FOR_spe_evlwhsplat
4305 || icode == CODE_FOR_spe_evlwwsplat
4306 || icode == CODE_FOR_spe_evrlwi
4307 || icode == CODE_FOR_spe_evslwi
4308 || icode == CODE_FOR_spe_evsrwis
4309 || icode == CODE_FOR_spe_evsrwiu)
4310 {
4311 /* Only allow 5-bit unsigned literals. */
4312 if (TREE_CODE (arg1) != INTEGER_CST
4313 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4314 {
4315 error ("argument 2 must be a 5-bit unsigned literal");
4316 return const0_rtx;
4317 }
4318 }
4319
4320 if (target == 0
4321 || GET_MODE (target) != tmode
4322 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4323 target = gen_reg_rtx (tmode);
4324
4325 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4326 op0 = copy_to_mode_reg (mode0, op0);
4327 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4328 op1 = copy_to_mode_reg (mode1, op1);
4329
4330 pat = GEN_FCN (icode) (target, op0, op1);
4331 if (! pat)
4332 return 0;
4333 emit_insn (pat);
4334
4335 return target;
4336 }
4337
/* Expand an AltiVec comparison-predicate builtin.  ICODE is the
   comparison pattern to emit, OPCODE names the underlying instruction
   (passed through as a SYMBOL_REF operand to the pattern), ARGLIST
   carries the builtin's arguments and TARGET is a candidate result
   register.

   The first builtin argument selects which CR6 bit combination forms
   the result (see the switch at the bottom); the remaining two are
   the vectors to compare.  Returns the SImode result, 0 if the insn
   could not be generated, or const0_rtx on user error.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both compare operands must be in the same mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector comparison itself lands in a scratch register; the
     value we care about is picked out of CR6 below.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4418
/* Expand a store-vector builtin.  ICODE is the store pattern and
   ARGLIST carries the three arguments.  Stores produce no value, so
   this always returns NULL_RTX (or const0_rtx on user error).

   Note the operand crisscross: ARG0 becomes insn operand 2, ARG1
   operand 0 and ARG2 operand 1, as seen both in the predicate checks
   and in the GEN_FCN call below.  */
static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* Each value is validated against the predicate of the insn operand
     it will actually occupy (see the GEN_FCN argument order).  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
4453
4454 static rtx
rs6000_expand_ternop_builtin(icode,arglist,target)4455 rs6000_expand_ternop_builtin (icode, arglist, target)
4456 enum insn_code icode;
4457 tree arglist;
4458 rtx target;
4459 {
4460 rtx pat;
4461 tree arg0 = TREE_VALUE (arglist);
4462 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4463 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4464 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4465 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4466 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4467 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4468 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4469 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4470 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4471
4472 if (icode == CODE_FOR_nothing)
4473 /* Builtin not supported on this processor. */
4474 return 0;
4475
4476 /* If we got invalid arguments bail out before generating bad rtl. */
4477 if (arg0 == error_mark_node
4478 || arg1 == error_mark_node
4479 || arg2 == error_mark_node)
4480 return const0_rtx;
4481
4482 if (icode == CODE_FOR_altivec_vsldoi_4sf
4483 || icode == CODE_FOR_altivec_vsldoi_4si
4484 || icode == CODE_FOR_altivec_vsldoi_8hi
4485 || icode == CODE_FOR_altivec_vsldoi_16qi)
4486 {
4487 /* Only allow 4-bit unsigned literals. */
4488 if (TREE_CODE (arg2) != INTEGER_CST
4489 || TREE_INT_CST_LOW (arg2) & ~0xf)
4490 {
4491 error ("argument 3 must be a 4-bit unsigned literal");
4492 return const0_rtx;
4493 }
4494 }
4495
4496 if (target == 0
4497 || GET_MODE (target) != tmode
4498 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4499 target = gen_reg_rtx (tmode);
4500
4501 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4502 op0 = copy_to_mode_reg (mode0, op0);
4503 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4504 op1 = copy_to_mode_reg (mode1, op1);
4505 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4506 op2 = copy_to_mode_reg (mode2, op2);
4507
4508 pat = GEN_FCN (icode) (target, op0, op1, op2);
4509 if (! pat)
4510 return 0;
4511 emit_insn (pat);
4512
4513 return target;
4514 }
4515
4516 /* Expand the lvx builtins. */
4517 static rtx
altivec_expand_ld_builtin(exp,target,expandedp)4518 altivec_expand_ld_builtin (exp, target, expandedp)
4519 tree exp;
4520 rtx target;
4521 bool *expandedp;
4522 {
4523 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4524 tree arglist = TREE_OPERAND (exp, 1);
4525 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4526 tree arg0;
4527 enum machine_mode tmode, mode0;
4528 rtx pat, op0;
4529 enum insn_code icode;
4530
4531 switch (fcode)
4532 {
4533 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4534 icode = CODE_FOR_altivec_lvx_16qi;
4535 break;
4536 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4537 icode = CODE_FOR_altivec_lvx_8hi;
4538 break;
4539 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4540 icode = CODE_FOR_altivec_lvx_4si;
4541 break;
4542 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4543 icode = CODE_FOR_altivec_lvx_4sf;
4544 break;
4545 default:
4546 *expandedp = false;
4547 return NULL_RTX;
4548 }
4549
4550 *expandedp = true;
4551
4552 arg0 = TREE_VALUE (arglist);
4553 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4554 tmode = insn_data[icode].operand[0].mode;
4555 mode0 = insn_data[icode].operand[1].mode;
4556
4557 if (target == 0
4558 || GET_MODE (target) != tmode
4559 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4560 target = gen_reg_rtx (tmode);
4561
4562 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4563 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4564
4565 pat = GEN_FCN (icode) (target, op0);
4566 if (! pat)
4567 return 0;
4568 emit_insn (pat);
4569 return target;
4570 }
4571
4572 /* Expand the stvx builtins. */
4573 static rtx
altivec_expand_st_builtin(exp,target,expandedp)4574 altivec_expand_st_builtin (exp, target, expandedp)
4575 tree exp;
4576 rtx target ATTRIBUTE_UNUSED;
4577 bool *expandedp;
4578 {
4579 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4580 tree arglist = TREE_OPERAND (exp, 1);
4581 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4582 tree arg0, arg1;
4583 enum machine_mode mode0, mode1;
4584 rtx pat, op0, op1;
4585 enum insn_code icode;
4586
4587 switch (fcode)
4588 {
4589 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4590 icode = CODE_FOR_altivec_stvx_16qi;
4591 break;
4592 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4593 icode = CODE_FOR_altivec_stvx_8hi;
4594 break;
4595 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4596 icode = CODE_FOR_altivec_stvx_4si;
4597 break;
4598 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4599 icode = CODE_FOR_altivec_stvx_4sf;
4600 break;
4601 default:
4602 *expandedp = false;
4603 return NULL_RTX;
4604 }
4605
4606 arg0 = TREE_VALUE (arglist);
4607 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4608 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4609 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4610 mode0 = insn_data[icode].operand[0].mode;
4611 mode1 = insn_data[icode].operand[1].mode;
4612
4613 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4614 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4615 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4616 op1 = copy_to_mode_reg (mode1, op1);
4617
4618 pat = GEN_FCN (icode) (op0, op1);
4619 if (pat)
4620 emit_insn (pat);
4621
4622 *expandedp = true;
4623 return NULL_RTX;
4624 }
4625
/* Expand the dst builtins.  EXP is the CALL_EXPR, TARGET is unused
   (the dst instructions produce no value), and *EXPANDEDP is set to
   true when EXP matched one of the entries in bdesc_dst.  Returns
   NULL_RTX, or const0_rtx on user error.  */
static rtx
altivec_expand_dst_builtin (exp, target, expandedp)
     tree exp;
     rtx target ATTRIBUTE_UNUSED;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	/* The stream selector must be a small literal; it is passed
	   straight through to GEN_FCN below without a predicate
	   check, so this is the only validation it gets.  */
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (mode0, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
4687
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Tries the specialized ld/st/dst expanders first, then the builtins
   handled inline here, then the abs/predicate/LV* tables; if nothing
   matches, *EXPANDEDP is cleared and NULL_RTX returned.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each helper sets *EXPANDEDP when it recognizes the builtin.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success; the fall-through at the bottom clears this if
     nothing below matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Not an AltiVec builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
4839
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  The leading mask
   field is 0 here; presumably it is filled in during builtin
   initialization (cf. enable_mask_for_builtins) -- verify against
   the init code.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4867
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* NOTE(review): this fetches the THIRD argument from the list
	 while the message says "argument 2" -- confirm against the
	 builtin prototypes which position carries the immediate.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Loads and other binops registered in bdesc_2arg_spe expand like
     ordinary two-operand builtins.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* Stores and the SPEFSCR accessors are handled individually.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
4995
/* Expand an SPE comparison-predicate builtin.  ICODE is the compare
   pattern to emit, ARGLIST the builtin's arguments and TARGET a
   candidate SImode result register.  The first argument selects
   which CR bit of the comparison forms the result (see the big
   comment below).  Returns the result, or const0_rtx on user error
   or when the insn could not be generated.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be a compile-time constant.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands must be in the same mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare sets a CC register; the result is derived from it.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the chosen CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5097
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

/* ICODE is the comparison pattern for *OP*; ARGLIST carries the four
   arguments and TARGET a candidate result register.  Returns the
   result, or const0_rtx on user error or generation failure.  */
static rtx
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Both compare operands of ICODE must agree in mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  /* NOTE(review): TARGET is validated with ICODE's operand 0
     predicate although the compare's operand 0 is the CC scratch
     emitted below -- confirm this is intended.  */
  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* All four data operands reuse operand 1's predicate; mode0 ==
     mode1 was checked above, and op2/op3 are operands of the evsel
     pattern rather than of ICODE -- presumably the same predicate
     applies to them, verify against the evsel patterns.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select the integer or floating-point evsel pattern by mode.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
5161
5162 /* Expand an expression EXP that calls a built-in function,
5163 with result going to TARGET if that's convenient
5164 (and in mode MODE if that's convenient).
5165 SUBTARGET may be used as the target for computing one of EXP's operands.
5166 IGNORE is nonzero if the value is to be ignored. */
5167
5168 static rtx
rs6000_expand_builtin(exp,target,subtarget,mode,ignore)5169 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5170 tree exp;
5171 rtx target;
5172 rtx subtarget ATTRIBUTE_UNUSED;
5173 enum machine_mode mode ATTRIBUTE_UNUSED;
5174 int ignore ATTRIBUTE_UNUSED;
5175 {
5176 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5177 tree arglist = TREE_OPERAND (exp, 1);
5178 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5179 struct builtin_description *d;
5180 size_t i;
5181 rtx ret;
5182 bool success;
5183
5184 if (TARGET_ALTIVEC)
5185 {
5186 ret = altivec_expand_builtin (exp, target, &success);
5187
5188 if (success)
5189 return ret;
5190 }
5191 if (TARGET_SPE)
5192 {
5193 ret = spe_expand_builtin (exp, target, &success);
5194
5195 if (success)
5196 return ret;
5197 }
5198
5199 if (TARGET_ALTIVEC || TARGET_SPE)
5200 {
5201 /* Handle simple unary operations. */
5202 d = (struct builtin_description *) bdesc_1arg;
5203 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5204 if (d->code == fcode)
5205 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5206
5207 /* Handle simple binary operations. */
5208 d = (struct builtin_description *) bdesc_2arg;
5209 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5210 if (d->code == fcode)
5211 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5212
5213 /* Handle simple ternary operations. */
5214 d = (struct builtin_description *) bdesc_3arg;
5215 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5216 if (d->code == fcode)
5217 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5218 }
5219
5220 abort ();
5221 return NULL_RTX;
5222 }
5223
5224 static void
rs6000_init_builtins()5225 rs6000_init_builtins ()
5226 {
5227 if (TARGET_SPE)
5228 spe_init_builtins ();
5229 if (TARGET_ALTIVEC)
5230 altivec_init_builtins ();
5231 if (TARGET_ALTIVEC || TARGET_SPE)
5232 rs6000_common_init_builtins ();
5233 }
5234
5235 /* Search through a set of builtins and enable the mask bits.
5236 DESC is an array of builtins.
   SIZE is the total number of builtins.
5238 START is the builtin enum at which to start.
5239 END is the builtin enum at which to end. */
static void
enable_mask_for_builtins (desc, size, start, end)
     struct builtin_description *desc;
     int size;
     enum rs6000_builtins start, end;
{
  int i;

  /* Locate the first entry in the START..END range.  */
  for (i = 0; i < size; ++i)
    if (desc[i].code == start)
      break;

  /* START not present in this table; nothing to do.  */
  if (i == size)
    return;

  for (; i < size; ++i)
    {
      /* Enable the builtin by making its mask match the current
	 target flags (so a later mask & target_flags test passes --
	 TODO confirm against the mask consumers).  */
      desc[i].mask = target_flags;
      if (desc[i].code == end)
	break;
    }
}
5263
/* Register the SPE builtin functions: the SPEFSCR accessors, the
   load/store builtins, and the predicate and evsel builtins.  The
   simple unary and binary SPE builtins are registered by
   rs6000_common_init_builtins; here we only enable their mask bits
   (see the note below).  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (V2SI_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types used by the builtins below, built with explicit
     tree_cons chains terminated by ENDLINK.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (V2SI_type_node,
     tree_cons (NULL_TREE, V2SI_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (V2SF_type_node,
     tree_cons (NULL_TREE, V2SF_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      tree_cons (NULL_TREE, V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  tree v2si_ftype_pv2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  The function type is chosen from the mode of the
     insn's second operand (the first vector argument).  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  /* An unhandled mode means a new predicate pattern was added
	     without a matching function type here.  */
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Same scheme: operand 1's mode selects the
     function type.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5473
/* Register the AltiVec builtin functions: internal load/store
   builtins, the VSCR and data-stream builtins, the lvs*/lve*/stv*
   builtins, the DST variants, the predicates, and the abs*
   operators.  */
static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified pointer types for the load builtins, which only
     read through their pointer argument.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function types used by the builtins below.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The function type is chosen from the
     mode of the insn's second operand (the first vector argument).  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  /* New predicate patterns need a matching type added here.  */
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  Here the result mode (operand 0)
     selects the function type.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5672
/* Register the simple unary, binary, and ternary builtins shared by
   the AltiVec and SPE support (tables bdesc_1arg, bdesc_2arg and
   bdesc_3arg).  For each table entry the function type is deduced
   from the modes of the insn's operands; an unrecognized mode
   combination aborts, so new patterns must get a type added here.  */
static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  /* Function types used below, one per supported operand-mode
     combination.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (V2SF_type_node,
				V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (V2SI_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Entries with no name or no insn pattern are placeholders;
	 skip them.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      /* Select the function type from the (result, argument) mode
	 pair; QImode arguments are small literals.  */
      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6126
6127
6128 /* Expand a block move operation, and return 1 if successful. Return 0
6129 if we should let the compiler generate normal code.
6130
6131 operands[0] is the destination
6132 operands[1] is the source
6133 operands[2] is the length
6134 operands[3] is the alignment */
6135
6136 #define MAX_MOVE_REG 4
6137
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy.  */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort.  */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move?  */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  Beyond that, punt to the generic expander.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Consume the block front-to-back, choosing the widest move the
	 remaining size, alignment, and available scratch registers allow.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* The chosen strategy is either a multi-register string move
	     (movstrsi, four operands) or a plain single-register move
	     (mov, two operands); the union records which generator to
	     call, discriminated by MODE staying BLKmode.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The N-reg movstrsi patterns need a run of call-clobbered
	     registers starting at r5 as scratch; reject a pattern if any
	     register in its window is fixed.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      /* lswi/stswi encode 32 bytes as 0 in the count field,
		 hence the & 31.  */
	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Simple load/store pair through a fresh pseudo.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else /* string instructions not available */
    {
      /* Emit all the loads first and batch the stores, up to MAX_MOVE_REG
	 at a time, so the scheduler can overlap the loads.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores still pending.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6351
6352
6353 /* Return 1 if OP is a load multiple operation. It is known to be a
6354 PARALLEL and the first section will be tested. */
6355
6356 int
load_multiple_operation(op,mode)6357 load_multiple_operation (op, mode)
6358 rtx op;
6359 enum machine_mode mode ATTRIBUTE_UNUSED;
6360 {
6361 int count = XVECLEN (op, 0);
6362 unsigned int dest_regno;
6363 rtx src_addr;
6364 int i;
6365
6366 /* Perform a quick check so we don't blow up below. */
6367 if (count <= 1
6368 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6369 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6370 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6371 return 0;
6372
6373 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6374 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6375
6376 for (i = 1; i < count; i++)
6377 {
6378 rtx elt = XVECEXP (op, 0, i);
6379
6380 if (GET_CODE (elt) != SET
6381 || GET_CODE (SET_DEST (elt)) != REG
6382 || GET_MODE (SET_DEST (elt)) != SImode
6383 || REGNO (SET_DEST (elt)) != dest_regno + i
6384 || GET_CODE (SET_SRC (elt)) != MEM
6385 || GET_MODE (SET_SRC (elt)) != SImode
6386 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6387 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6388 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6389 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6390 return 0;
6391 }
6392
6393 return 1;
6394 }
6395
6396 /* Similar, but tests for store multiple. Here, the second vector element
6397 is a CLOBBER. It will be tested later. */
6398
6399 int
store_multiple_operation(op,mode)6400 store_multiple_operation (op, mode)
6401 rtx op;
6402 enum machine_mode mode ATTRIBUTE_UNUSED;
6403 {
6404 int count = XVECLEN (op, 0) - 1;
6405 unsigned int src_regno;
6406 rtx dest_addr;
6407 int i;
6408
6409 /* Perform a quick check so we don't blow up below. */
6410 if (count <= 1
6411 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6412 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6413 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6414 return 0;
6415
6416 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6417 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6418
6419 for (i = 1; i < count; i++)
6420 {
6421 rtx elt = XVECEXP (op, 0, i + 1);
6422
6423 if (GET_CODE (elt) != SET
6424 || GET_CODE (SET_SRC (elt)) != REG
6425 || GET_MODE (SET_SRC (elt)) != SImode
6426 || REGNO (SET_SRC (elt)) != src_regno + i
6427 || GET_CODE (SET_DEST (elt)) != MEM
6428 || GET_MODE (SET_DEST (elt)) != SImode
6429 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6430 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6431 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6432 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6433 return 0;
6434 }
6435
6436 return 1;
6437 }
6438
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.

   The complication handled here: if the address register (operands[1])
   is also one of the destination registers, a plain lswi would clobber
   the address partway through, so the sequence must be reordered.  */

const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A one-word "multiple" is just a single load.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for a destination register that overlaps the address register.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address lands in the LAST destination: string-move the
	       first words-1 registers, then load the final word (which
	       overwrites the address register) last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address lands in the FIRST destination: bump the address
	       past word 0, string-move the remaining words into the later
	       registers, then load word 0 into the address register.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address lands somewhere in the middle: give up on the
	       string move and emit individual lwz insns, saving the one
	       that clobbers the address register for last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a single string move does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6496
6497 /* Return 1 for a parallel vrsave operation. */
6498
6499 int
vrsave_operation(op,mode)6500 vrsave_operation (op, mode)
6501 rtx op;
6502 enum machine_mode mode ATTRIBUTE_UNUSED;
6503 {
6504 int count = XVECLEN (op, 0);
6505 unsigned int dest_regno, src_regno;
6506 int i;
6507
6508 if (count <= 1
6509 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6510 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6511 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6512 return 0;
6513
6514 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6515 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6516
6517 if (dest_regno != VRSAVE_REGNO
6518 && src_regno != VRSAVE_REGNO)
6519 return 0;
6520
6521 for (i = 1; i < count; i++)
6522 {
6523 rtx elt = XVECEXP (op, 0, i);
6524
6525 if (GET_CODE (elt) != CLOBBER
6526 && GET_CODE (elt) != SET)
6527 return 0;
6528 }
6529
6530 return 1;
6531 }
6532
/* Return 1 for a PARALLEL suitable for mtcrf: every element sets one CR
   field from an UNSPEC of (source GPR, field mask).  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The source must be a single SImode general-purpose register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element must set one condition-register field.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The mtcrf field mask bit for this CR field: bit 0 of the mask
	 corresponds to the highest-numbered CR register.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE: 20 is the literal UNSPEC number used by the mtcrf
	 patterns in rs6000.md — keep in sync with the machine
	 description.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6581
/* Return 1 for a PARALLEL suitable for lmw: a load of consecutive words
   into registers DEST_REGNO .. r31 from consecutive addresses.  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through r31, so the element count must match.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      /* r0 in the base position reads as literal zero — not usable.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load register DEST_REGNO + I from BASE + OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6659
6660 /* Return 1 for an PARALLEL suitable for stmw. */
6661
6662 int
stmw_operation(op,mode)6663 stmw_operation (op, mode)
6664 rtx op;
6665 enum machine_mode mode ATTRIBUTE_UNUSED;
6666 {
6667 int count = XVECLEN (op, 0);
6668 unsigned int src_regno;
6669 rtx dest_addr;
6670 unsigned int base_regno;
6671 HOST_WIDE_INT offset;
6672 int i;
6673
6674 /* Perform a quick check so we don't blow up below. */
6675 if (count <= 1
6676 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6677 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6678 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6679 return 0;
6680
6681 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6682 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6683
6684 if (src_regno > 31
6685 || count != 32 - (int) src_regno)
6686 return 0;
6687
6688 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6689 {
6690 offset = 0;
6691 base_regno = REGNO (dest_addr);
6692 if (base_regno == 0)
6693 return 0;
6694 }
6695 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6696 {
6697 offset = INTVAL (XEXP (dest_addr, 1));
6698 base_regno = REGNO (XEXP (dest_addr, 0));
6699 }
6700 else
6701 return 0;
6702
6703 for (i = 0; i < count; i++)
6704 {
6705 rtx elt = XVECEXP (op, 0, i);
6706 rtx newaddr;
6707 rtx addr_reg;
6708 HOST_WIDE_INT newoffset;
6709
6710 if (GET_CODE (elt) != SET
6711 || GET_CODE (SET_SRC (elt)) != REG
6712 || GET_MODE (SET_SRC (elt)) != SImode
6713 || REGNO (SET_SRC (elt)) != src_regno + i
6714 || GET_CODE (SET_DEST (elt)) != MEM
6715 || GET_MODE (SET_DEST (elt)) != SImode)
6716 return 0;
6717 newaddr = XEXP (SET_DEST (elt), 0);
6718 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6719 {
6720 newoffset = 0;
6721 addr_reg = newaddr;
6722 }
6723 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6724 {
6725 addr_reg = XEXP (newaddr, 0);
6726 newoffset = INTVAL (XEXP (newaddr, 1));
6727 }
6728 else
6729 return 0;
6730 if (REGNO (addr_reg) != base_regno
6731 || newoffset != offset + 4 * i)
6732 return 0;
6733 }
6734
6735 return 1;
6736 }
6737
6738 /* A validation routine: say whether CODE, a condition code, and MODE
6739 match. The other alternatives either don't make sense or should
6740 never be generated. */
6741
6742 static void
validate_condition_mode(code,mode)6743 validate_condition_mode (code, mode)
6744 enum rtx_code code;
6745 enum machine_mode mode;
6746 {
6747 if (GET_RTX_CLASS (code) != '<'
6748 || GET_MODE_CLASS (mode) != MODE_CC)
6749 abort ();
6750
6751 /* These don't make sense. */
6752 if ((code == GT || code == LT || code == GE || code == LE)
6753 && mode == CCUNSmode)
6754 abort ();
6755
6756 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6757 && mode != CCUNSmode)
6758 abort ();
6759
6760 if (mode != CCFPmode
6761 && (code == ORDERED || code == UNORDERED
6762 || code == UNEQ || code == LTGT
6763 || code == UNGT || code == UNLT
6764 || code == UNGE || code == UNLE))
6765 abort ();
6766
6767 /* These should never be generated except for
6768 flag_unsafe_math_optimizations and flag_finite_math_only. */
6769 if (mode == CCFPmode
6770 && ! flag_unsafe_math_optimizations
6771 && ! flag_finite_math_only
6772 && (code == LE || code == GE
6773 || code == UNEQ || code == LTGT
6774 || code == UNGT || code == UNLT))
6775 abort ();
6776
6777 /* These are invalid; the information is not there. */
6778 if (mode == CCEQmode
6779 && code != EQ && code != NE)
6780 abort ();
6781 }
6782
6783 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6784 We only check the opcode against the mode of the CC value here. */
6785
6786 int
branch_comparison_operator(op,mode)6787 branch_comparison_operator (op, mode)
6788 rtx op;
6789 enum machine_mode mode ATTRIBUTE_UNUSED;
6790 {
6791 enum rtx_code code = GET_CODE (op);
6792 enum machine_mode cc_mode;
6793
6794 if (GET_RTX_CLASS (code) != '<')
6795 return 0;
6796
6797 cc_mode = GET_MODE (XEXP (op, 0));
6798 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6799 return 0;
6800
6801 validate_condition_mode (code, cc_mode);
6802
6803 return 1;
6804 }
6805
6806 /* Return 1 if OP is a comparison operation that is valid for a branch
6807 insn and which is true if the corresponding bit in the CC register
6808 is set. */
6809
6810 int
branch_positive_comparison_operator(op,mode)6811 branch_positive_comparison_operator (op, mode)
6812 rtx op;
6813 enum machine_mode mode;
6814 {
6815 enum rtx_code code;
6816
6817 if (! branch_comparison_operator (op, mode))
6818 return 0;
6819
6820 code = GET_CODE (op);
6821 return (code == EQ || code == LT || code == GT
6822 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6823 || code == LTU || code == GTU
6824 || code == UNORDERED);
6825 }
6826
6827 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6828 We check the opcode against the mode of the CC value and disallow EQ or
6829 NE comparisons for integers. */
6830
6831 int
scc_comparison_operator(op,mode)6832 scc_comparison_operator (op, mode)
6833 rtx op;
6834 enum machine_mode mode;
6835 {
6836 enum rtx_code code = GET_CODE (op);
6837 enum machine_mode cc_mode;
6838
6839 if (GET_MODE (op) != mode && mode != VOIDmode)
6840 return 0;
6841
6842 if (GET_RTX_CLASS (code) != '<')
6843 return 0;
6844
6845 cc_mode = GET_MODE (XEXP (op, 0));
6846 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6847 return 0;
6848
6849 validate_condition_mode (code, cc_mode);
6850
6851 if (code == NE && cc_mode != CCFPmode)
6852 return 0;
6853
6854 return 1;
6855 }
6856
6857 int
trap_comparison_operator(op,mode)6858 trap_comparison_operator (op, mode)
6859 rtx op;
6860 enum machine_mode mode;
6861 {
6862 if (mode != VOIDmode && mode != GET_MODE (op))
6863 return 0;
6864 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6865 }
6866
6867 int
boolean_operator(op,mode)6868 boolean_operator (op, mode)
6869 rtx op;
6870 enum machine_mode mode ATTRIBUTE_UNUSED;
6871 {
6872 enum rtx_code code = GET_CODE (op);
6873 return (code == AND || code == IOR || code == XOR);
6874 }
6875
6876 int
boolean_or_operator(op,mode)6877 boolean_or_operator (op, mode)
6878 rtx op;
6879 enum machine_mode mode ATTRIBUTE_UNUSED;
6880 {
6881 enum rtx_code code = GET_CODE (op);
6882 return (code == IOR || code == XOR);
6883 }
6884
6885 int
min_max_operator(op,mode)6886 min_max_operator (op, mode)
6887 rtx op;
6888 enum machine_mode mode ATTRIBUTE_UNUSED;
6889 {
6890 enum rtx_code code = GET_CODE (op);
6891 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6892 }
6893
6894 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6895 mask required to convert the result of a rotate insn into a shift
6896 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6897
6898 int
includes_lshift_p(shiftop,andop)6899 includes_lshift_p (shiftop, andop)
6900 rtx shiftop;
6901 rtx andop;
6902 {
6903 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6904
6905 shift_mask <<= INTVAL (shiftop);
6906
6907 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6908 }
6909
6910 /* Similar, but for right shift. */
6911
6912 int
includes_rshift_p(shiftop,andop)6913 includes_rshift_p (shiftop, andop)
6914 rtx shiftop;
6915 rtx andop;
6916 {
6917 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6918
6919 shift_mask >>= INTVAL (shiftop);
6920
6921 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6922 }
6923
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks are never acceptable.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit (two's-complement trick:
	 C & -C isolates the lowest set bit).  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask, i.e. the mask
	 has exactly SHIFTOP trailing zeros.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above, i.e. the
	 original mask had a single contiguous run of ones.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 32-bit host: the 64-bit mask is split across two
	 HOST_WIDE_INT halves and each case must be handled by hand.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one 64-bit masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones lies entirely in the high half; the shift
	     must therefore be by at least 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  /* Same contiguous-run check as the CONST_INT case, applied
	     to the high word.  */
	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* The low bits of the mask must satisfy the trailing-zero
	 requirement on their own.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both halves and strip the (inverted) low run of ones,
	 then verify no further transitions remain.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The next transition (if any) is in the high half.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7018
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit (C & -C isolates it).  */
      lsb = c & -c;

      /* It must be covered by the shift mask, i.e. the mask has at
	 least SHIFTOP trailing zeros.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's
	 (lsb == 1 would mean no trailing zeros at all).  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 32-bit host: the 64-bit mask is split across two
	 HOST_WIDE_INT halves.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The transition is in the high half; the shift must
		 reach past bit 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* If the low half has any ones, the high half must be all
	     ones for the mask to be 0...01...1 shaped.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7090
7091 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7092 for lfq and stfq insns.
7093
7094 Note reg1 and reg2 *must* be hard registers. To be sure we will
7095 abort if we are passed pseudo registers. */
7096
7097 int
registers_ok_for_quad_peep(reg1,reg2)7098 registers_ok_for_quad_peep (reg1, reg2)
7099 rtx reg1, reg2;
7100 {
7101 /* We might have been passed a SUBREG. */
7102 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7103 return 0;
7104
7105 return (REGNO (reg1) == REGNO (reg2) - 1);
7106 }
7107
7108 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7109 addr1 and addr2 must be in consecutive memory locations
7110 (addr2 == addr1 + 8). */
7111
7112 int
addrs_ok_for_quad_peep(addr1,addr2)7113 addrs_ok_for_quad_peep (addr1, addr2)
7114 rtx addr1;
7115 rtx addr2;
7116 {
7117 unsigned int reg1;
7118 int offset1;
7119
7120 /* Extract an offset (if used) from the first addr. */
7121 if (GET_CODE (addr1) == PLUS)
7122 {
7123 /* If not a REG, return zero. */
7124 if (GET_CODE (XEXP (addr1, 0)) != REG)
7125 return 0;
7126 else
7127 {
7128 reg1 = REGNO (XEXP (addr1, 0));
7129 /* The offset must be constant! */
7130 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7131 return 0;
7132 offset1 = INTVAL (XEXP (addr1, 1));
7133 }
7134 }
7135 else if (GET_CODE (addr1) != REG)
7136 return 0;
7137 else
7138 {
7139 reg1 = REGNO (addr1);
7140 /* This was a simple (mem (reg)) expression. Offset is 0. */
7141 offset1 = 0;
7142 }
7143
7144 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7145 if (GET_CODE (addr2) != PLUS)
7146 return 0;
7147
7148 if (GET_CODE (XEXP (addr2, 0)) != REG
7149 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7150 return 0;
7151
7152 if (reg1 != REGNO (XEXP (addr2, 0)))
7153 return 0;
7154
7155 /* The offset for the second addr must be 8 more than the first addr. */
7156 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7157 return 0;
7158
7159 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7160 instructions. */
7161 return 1;
7162 }
7163
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  /* Hard register number of IN, or -1 when IN is not (or does not
     resolve to) a hard register.  */
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Resolve IN to a hard register number if possible; pseudos and
     SUBREGs go through true_regnum, anything unresolved becomes -1.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers
     (regno == -1 covers the constant/memory case).  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7236
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  /* The compared value must be one of the condition registers.  */
  reg = XEXP (op, 0);

  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies 4 consecutive CCR bits (LT, GT, EQ, SO).  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* SPE floating-point compares record their result in the GT bit
	 (base_bit + 1) of the CR field.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7301
7302 /* Return the GOT register. */
7303
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function uses the GOT/TOC pointer so the
     prologue code knows it must be set up.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
7318
7319 /* Function to init struct machine_function.
7320 This will be called, via a pointer variable,
7321 from push_function_context. */
7322
static struct machine_function *
rs6000_init_machine_status ()
{
  /* Allocate a zero-initialized, garbage-collected machine_function;
     all fields start out cleared.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7328
7329 /* These macros test for integers and extract the low-order bits. */
7330 #define INT_P(X) \
7331 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7332 && GET_MODE (X) == VOIDmode)
7333
7334 #define INT_LOWPART(X) \
7335 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7336
7337 int
extract_MB(op)7338 extract_MB (op)
7339 rtx op;
7340 {
7341 int i;
7342 unsigned long val = INT_LOWPART (op);
7343
7344 /* If the high bit is zero, the value is the first 1 bit we find
7345 from the left. */
7346 if ((val & 0x80000000) == 0)
7347 {
7348 if ((val & 0xffffffff) == 0)
7349 abort ();
7350
7351 i = 1;
7352 while (((val <<= 1) & 0x80000000) == 0)
7353 ++i;
7354 return i;
7355 }
7356
7357 /* If the high bit is set and the low bit is not, or the mask is all
7358 1's, the value is zero. */
7359 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7360 return 0;
7361
7362 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7363 from the right. */
7364 i = 31;
7365 while (((val >>= 1) & 1) != 0)
7366 --i;
7367
7368 return i;
7369 }
7370
7371 int
extract_ME(op)7372 extract_ME (op)
7373 rtx op;
7374 {
7375 int i;
7376 unsigned long val = INT_LOWPART (op);
7377
7378 /* If the low bit is zero, the value is the first 1 bit we find from
7379 the right. */
7380 if ((val & 1) == 0)
7381 {
7382 if ((val & 0xffffffff) == 0)
7383 abort ();
7384
7385 i = 30;
7386 while (((val >>= 1) & 1) == 0)
7387 --i;
7388
7389 return i;
7390 }
7391
7392 /* If the low bit is set and the high bit is not, or the mask is all
7393 1's, the value is 31. */
7394 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7395 return 31;
7396
7397 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7398 from the left. */
7399 i = 0;
7400 while (((val <<= 1) & 0x80000000) != 0)
7401 ++i;
7402
7403 return i;
7404 }
7405
7406 /* Print an operand. Recognize special options, documented below. */
7407
7408 #if TARGET_ELF
7409 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7410 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7411 #else
7412 #define SMALL_DATA_RELOC "sda21"
7413 #define SMALL_DATA_REG 0
7414 #endif
7415
/* Write operand X to FILE using output-modifier CODE.  CODE selects
   one of the target-specific formats documented case by case below;
   CODE 0 prints the operand in the default way.  Invalid combinations
   are reported through output_operand_lossage (which may still fall
   through and print) or abort for internal inconsistencies.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	 optional cror needed for certain floating-point
	 comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  /* OR the EQ bit into the unordered slot so the condition is
	     testable as a single bit.  */
	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  /* Invert the EQ bit into the unordered slot for NE.  */
	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      else if (TARGET_SPE && TARGET_HARD_FLOAT
	       && GET_CODE (x) == EQ
	       && GET_MODE (XEXP (x, 0)) == CCFPmode)
	{
	  /* SPE FP compares put their result in the GT slot; invert it
	     in place for EQ.  */
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
		   base_bit + 1, base_bit + 1);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  /* NOTE(review): output continues after a lossage above --
	     longstanding behavior; lossage reports the error itself.  */
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Rows: AND, IOR, XOR.  Columns: plain, first operand negated,
	   both operands negated.  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
	  /* The double shift builds an all-ones 64-bit mask without
	     invoking a shift by the full type width.  */
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
	  i = 63;
	}
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  The comments give the
	 numeric TO encoding for each condition.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      /* On 32-bit hosts the 64-bit value is split across a CONST_INT
	 (zero-extended) or the two halves of a CONST_DOUBLE.  */
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      /* Print `x' for an indexed (reg+reg) memory operand.  */
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_SPE)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    /* If the base is r0, swap the operands: r0 as the first
	       register operand of an indexed form means literal zero.  */
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      /* Default output: register name, memory address, or constant.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
8057
8058 /* Print the address of an operand. */
8059
/* Write the assembler syntax for memory address X to FILE.  Handles
   plain registers, symbolic addresses (with small-data relocation
   suffixes where applicable), reg+reg and reg+const forms, ELF and
   Mach-O LO_SUM forms, and TOC constant-pool references.  Aborts on
   addresses that should have been rejected by the legitimizers.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* Bare symbolic addresses are invalid under TOC addressing.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form.  If the base is r0, swap the operands, since r0
	 as the first register operand means literal zero.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Displacement form: offset(base).  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part: sym@l(reg).  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O low-part: lo16(sym)(reg).  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* For ELF, temporarily append "@toc" to the symbol name;
		 the original name is restored below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the in-place surgery on the RTL.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8147
8148 /* Target hook for assembling integer objects. The PowerPC version has
8149 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8150 is defined. It also needs to handle DI-mode objects on 64-bit
8151 targets. */
8152
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guards against re-entering this path from output_addr_const.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  /* Emit the word itself, tagged @fixup...  */
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* ... then record its label in the .fixup section.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Anything else gets the generic handling.  */
  return default_assemble_integer (x, size, aligned_p);
}
8211
8212 #ifdef HAVE_GAS_HIDDEN
8213 /* Emit an assembler directive to set symbol visibility for DECL to
8214 VISIBILITY_TYPE. */
8215
8216 static void
rs6000_assemble_visibility(decl,vis)8217 rs6000_assemble_visibility (decl, vis)
8218 tree decl;
8219 int vis;
8220 {
8221 /* Functions need to have their entry point symbol visibility set as
8222 well as their descriptor symbol visibility. */
8223 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8224 {
8225 static const char * const visibility_types[] = {
8226 NULL, "internal", "hidden", "protected"
8227 };
8228
8229 const char *name, *type;
8230
8231 name = ((* targetm.strip_name_encoding)
8232 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8233 type = visibility_types[vis];
8234
8235 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8236 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8237 }
8238 else
8239 default_assemble_visibility (decl, vis);
8240 }
8241 #endif
8242
8243 enum rtx_code
rs6000_reverse_condition(mode,code)8244 rs6000_reverse_condition (mode, code)
8245 enum machine_mode mode;
8246 enum rtx_code code;
8247 {
8248 /* Reversal of FP compares takes care -- an ordered compare
8249 becomes an unordered compare and vice versa. */
8250 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8251 return reverse_condition_maybe_unordered (code);
8252 else
8253 return reverse_condition (code);
8254 }
8255
8256 /* Generate a compare for CODE. Return a brand-new rtx that
8257 represents the result of the compare. */
8258
8259 static rtx
rs6000_generate_compare(code)8260 rs6000_generate_compare (code)
8261 enum rtx_code code;
8262 {
8263 enum machine_mode comp_mode;
8264 rtx compare_result;
8265
8266 if (rs6000_compare_fp_p)
8267 comp_mode = CCFPmode;
8268 else if (code == GTU || code == LTU
8269 || code == GEU || code == LEU)
8270 comp_mode = CCUNSmode;
8271 else
8272 comp_mode = CCmode;
8273
8274 /* First, the compare. */
8275 compare_result = gen_reg_rtx (comp_mode);
8276
8277 /* SPE FP compare instructions on the GPRs. Yuck! */
8278 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8279 {
8280 rtx cmp, or1, or2, or_result, compare_result2;
8281
8282 switch (code)
8283 {
8284 case EQ:
8285 case UNEQ:
8286 case NE:
8287 case LTGT:
8288 cmp = flag_unsafe_math_optimizations
8289 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8290 rs6000_compare_op1)
8291 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8292 rs6000_compare_op1);
8293 break;
8294 case GT:
8295 case GTU:
8296 case UNGT:
8297 case UNGE:
8298 case GE:
8299 case GEU:
8300 cmp = flag_unsafe_math_optimizations
8301 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8302 rs6000_compare_op1)
8303 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8304 rs6000_compare_op1);
8305 break;
8306 case LT:
8307 case LTU:
8308 case UNLT:
8309 case UNLE:
8310 case LE:
8311 case LEU:
8312 cmp = flag_unsafe_math_optimizations
8313 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8314 rs6000_compare_op1)
8315 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8316 rs6000_compare_op1);
8317 break;
8318 default:
8319 abort ();
8320 }
8321
8322 /* Synthesize LE and GE from LT/GT || EQ. */
8323 if (code == LE || code == GE || code == LEU || code == GEU)
8324 {
	  /* Synthesize GE/LE from GT/LT || EQ.  */
8326
8327 emit_insn (cmp);
8328
8329 switch (code)
8330 {
8331 case LE: code = LT; break;
8332 case GE: code = GT; break;
8333 case LEU: code = LT; break;
8334 case GEU: code = GT; break;
8335 default: abort ();
8336 }
8337
8338 or1 = gen_reg_rtx (SImode);
8339 or2 = gen_reg_rtx (SImode);
8340 or_result = gen_reg_rtx (CCEQmode);
8341 compare_result2 = gen_reg_rtx (CCFPmode);
8342
8343 /* Do the EQ. */
8344 cmp = flag_unsafe_math_optimizations
8345 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8346 rs6000_compare_op1)
8347 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8348 rs6000_compare_op1);
8349 emit_insn (cmp);
8350
8351 /* The MC8540 FP compare instructions set the CR bits
8352 differently than other PPC compare instructions. For
8353 that matter, there is no generic test instruction, but a
8354 testgt, testlt, and testeq. For a true condition, bit 2
8355 is set (x1xx) in the CR. Following the traditional CR
8356 values:
8357
8358 LT GT EQ OV
8359 bit3 bit2 bit1 bit0
8360
8361 ... bit 2 would be a GT CR alias, so later on we
8362 look in the GT bits for the branch instructins.
8363 However, we must be careful to emit correct RTL in
8364 the meantime, so optimizations don't get confused. */
8365
8366 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8367 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8368
8369 /* OR them together. */
8370 cmp = gen_rtx_SET (VOIDmode, or_result,
8371 gen_rtx_COMPARE (CCEQmode,
8372 gen_rtx_IOR (SImode, or1, or2),
8373 const_true_rtx));
8374 compare_result = or_result;
8375 code = EQ;
8376 }
8377 else
8378 {
8379 /* We only care about 1 bit (x1xx), so map everything to NE to
8380 maintain rtl sanity. We'll get to the right bit (x1xx) at
8381 code output time. */
8382 if (code == NE || code == LTGT)
8383 /* Do the inverse here because we have no cmpne
8384 instruction. We use the cmpeq instruction and expect
8385 to get a 0 instead. */
8386 code = EQ;
8387 else
8388 code = NE;
8389 }
8390
8391 emit_insn (cmp);
8392 }
8393 else
8394 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8395 gen_rtx_COMPARE (comp_mode,
8396 rs6000_compare_op0,
8397 rs6000_compare_op1)));
8398
8399 /* Some kinds of FP comparisons need an OR operation;
8400 except for flag_unsafe_math_optimizations we don't bother. */
8401 if (rs6000_compare_fp_p
8402 && ! flag_unsafe_math_optimizations
8403 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8404 && (code == LE || code == GE
8405 || code == UNEQ || code == LTGT
8406 || code == UNGT || code == UNLT))
8407 {
8408 enum rtx_code or1, or2;
8409 rtx or1_rtx, or2_rtx, compare2_rtx;
8410 rtx or_result = gen_reg_rtx (CCEQmode);
8411
8412 switch (code)
8413 {
8414 case LE: or1 = LT; or2 = EQ; break;
8415 case GE: or1 = GT; or2 = EQ; break;
8416 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8417 case LTGT: or1 = LT; or2 = GT; break;
8418 case UNGT: or1 = UNORDERED; or2 = GT; break;
8419 case UNLT: or1 = UNORDERED; or2 = LT; break;
8420 default: abort ();
8421 }
8422 validate_condition_mode (or1, comp_mode);
8423 validate_condition_mode (or2, comp_mode);
8424 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8425 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8426 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8427 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8428 const_true_rtx);
8429 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8430
8431 compare_result = or_result;
8432 code = EQ;
8433 }
8434
8435 validate_condition_mode (code, GET_MODE (compare_result));
8436
8437 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8438 }
8439
8440
8441 /* Emit the RTL for an sCOND pattern. */
8442
8443 void
rs6000_emit_sCOND(code,result)8444 rs6000_emit_sCOND (code, result)
8445 enum rtx_code code;
8446 rtx result;
8447 {
8448 rtx condition_rtx;
8449 enum machine_mode op_mode;
8450
8451 condition_rtx = rs6000_generate_compare (code);
8452
8453 op_mode = GET_MODE (rs6000_compare_op0);
8454 if (op_mode == VOIDmode)
8455 op_mode = GET_MODE (rs6000_compare_op1);
8456
8457 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8458 {
8459 PUT_MODE (condition_rtx, DImode);
8460 convert_move (result, condition_rtx, 0);
8461 }
8462 else
8463 {
8464 PUT_MODE (condition_rtx, SImode);
8465 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8466 }
8467 }
8468
8469 /* Emit a branch of kind CODE to location LOC. */
8470
8471 void
rs6000_emit_cbranch(code,loc)8472 rs6000_emit_cbranch (code, loc)
8473 enum rtx_code code;
8474 rtx loc;
8475 {
8476 rtx condition_rtx, loc_ref;
8477
8478 condition_rtx = rs6000_generate_compare (code);
8479 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8480 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8481 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8482 loc_ref, pc_rtx)));
8483 }
8484
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE: the returned string lives in this static buffer, so it is
     only valid until the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A branch whose length attribute is 8 is out of range of a single
     conditional branch; we must branch around an unconditional jump.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* When we branch around a jump, the condition is inverted.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Translate the rtx comparison code into the assembler condition
     mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* The sense of "+" is flipped when we branch around a long
	     jump, hence the XOR with need_longbranch.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* Emit the opcode; the {old|new} braces presumably select between
     the POWER and PowerPC mnemonic sets downstream -- confirm against
     the asm output machinery.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8618
8619 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8620 operands of the last comparison is nonzero/true, FALSE_COND if it
8621 is zero/false. Return 0 if the hardware has no such operation. */
8622
8623 int
rs6000_emit_cmove(dest,op,true_cond,false_cond)8624 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8625 rtx dest;
8626 rtx op;
8627 rtx true_cond;
8628 rtx false_cond;
8629 {
8630 enum rtx_code code = GET_CODE (op);
8631 rtx op0 = rs6000_compare_op0;
8632 rtx op1 = rs6000_compare_op1;
8633 REAL_VALUE_TYPE c1;
8634 enum machine_mode compare_mode = GET_MODE (op0);
8635 enum machine_mode result_mode = GET_MODE (dest);
8636 rtx temp;
8637
8638 /* These modes should always match. */
8639 if (GET_MODE (op1) != compare_mode
8640 /* In the isel case however, we can use a compare immediate, so
8641 op1 may be a small constant. */
8642 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8643 return 0;
8644 if (GET_MODE (true_cond) != result_mode)
8645 return 0;
8646 if (GET_MODE (false_cond) != result_mode)
8647 return 0;
8648
8649 /* First, work out if the hardware can do this at all, or
8650 if it's too slow... */
8651 if (! rs6000_compare_fp_p)
8652 {
8653 if (TARGET_ISEL)
8654 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8655 return 0;
8656 }
8657
8658 /* Eliminate half of the comparisons by switching operands, this
8659 makes the remaining code simpler. */
8660 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8661 || code == LTGT || code == LT)
8662 {
8663 code = reverse_condition_maybe_unordered (code);
8664 temp = true_cond;
8665 true_cond = false_cond;
8666 false_cond = temp;
8667 }
8668
8669 /* UNEQ and LTGT take four instructions for a comparison with zero,
8670 it'll probably be faster to use a branch here too. */
8671 if (code == UNEQ)
8672 return 0;
8673
8674 if (GET_CODE (op1) == CONST_DOUBLE)
8675 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8676
8677 /* We're going to try to implement comparions by performing
8678 a subtract, then comparing against zero. Unfortunately,
8679 Inf - Inf is NaN which is not zero, and so if we don't
8680 know that the operand is finite and the comparison
8681 would treat EQ different to UNORDERED, we can't do it. */
8682 if (! flag_unsafe_math_optimizations
8683 && code != GT && code != UNGE
8684 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8685 /* Constructs of the form (a OP b ? a : b) are safe. */
8686 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8687 || (! rtx_equal_p (op0, true_cond)
8688 && ! rtx_equal_p (op1, true_cond))))
8689 return 0;
8690 /* At this point we know we can use fsel. */
8691
8692 /* Reduce the comparison to a comparison against zero. */
8693 temp = gen_reg_rtx (compare_mode);
8694 emit_insn (gen_rtx_SET (VOIDmode, temp,
8695 gen_rtx_MINUS (compare_mode, op0, op1)));
8696 op0 = temp;
8697 op1 = CONST0_RTX (compare_mode);
8698
8699 /* If we don't care about NaNs we can reduce some of the comparisons
8700 down to faster ones. */
8701 if (flag_unsafe_math_optimizations)
8702 switch (code)
8703 {
8704 case GT:
8705 code = LE;
8706 temp = true_cond;
8707 true_cond = false_cond;
8708 false_cond = temp;
8709 break;
8710 case UNGE:
8711 code = GE;
8712 break;
8713 case UNEQ:
8714 code = EQ;
8715 break;
8716 default:
8717 break;
8718 }
8719
8720 /* Now, reduce everything down to a GE. */
8721 switch (code)
8722 {
8723 case GE:
8724 break;
8725
8726 case LE:
8727 temp = gen_reg_rtx (compare_mode);
8728 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8729 op0 = temp;
8730 break;
8731
8732 case ORDERED:
8733 temp = gen_reg_rtx (compare_mode);
8734 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8735 op0 = temp;
8736 break;
8737
8738 case EQ:
8739 temp = gen_reg_rtx (compare_mode);
8740 emit_insn (gen_rtx_SET (VOIDmode, temp,
8741 gen_rtx_NEG (compare_mode,
8742 gen_rtx_ABS (compare_mode, op0))));
8743 op0 = temp;
8744 break;
8745
8746 case UNGE:
8747 temp = gen_reg_rtx (result_mode);
8748 emit_insn (gen_rtx_SET (VOIDmode, temp,
8749 gen_rtx_IF_THEN_ELSE (result_mode,
8750 gen_rtx_GE (VOIDmode,
8751 op0, op1),
8752 true_cond, false_cond)));
8753 false_cond = temp;
8754 true_cond = false_cond;
8755
8756 temp = gen_reg_rtx (compare_mode);
8757 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8758 op0 = temp;
8759 break;
8760
8761 case GT:
8762 temp = gen_reg_rtx (result_mode);
8763 emit_insn (gen_rtx_SET (VOIDmode, temp,
8764 gen_rtx_IF_THEN_ELSE (result_mode,
8765 gen_rtx_GE (VOIDmode,
8766 op0, op1),
8767 true_cond, false_cond)));
8768 true_cond = temp;
8769 false_cond = true_cond;
8770
8771 temp = gen_reg_rtx (compare_mode);
8772 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8773 op0 = temp;
8774 break;
8775
8776 default:
8777 abort ();
8778 }
8779
8780 emit_insn (gen_rtx_SET (VOIDmode, dest,
8781 gen_rtx_IF_THEN_ELSE (result_mode,
8782 gen_rtx_GE (VOIDmode,
8783 op0, op1),
8784 true_cond, false_cond)));
8785 return 1;
8786 }
8787
8788 /* Same as above, but for ints (isel). */
8789
8790 static int
rs6000_emit_int_cmove(dest,op,true_cond,false_cond)8791 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8792 rtx dest;
8793 rtx op;
8794 rtx true_cond;
8795 rtx false_cond;
8796 {
8797 rtx condition_rtx, cr;
8798
8799 /* All isel implementations thus far are 32-bits. */
8800 if (GET_MODE (rs6000_compare_op0) != SImode)
8801 return 0;
8802
8803 /* We still have to do the compare, because isel doesn't do a
8804 compare, it just looks at the CRx bits set by a previous compare
8805 instruction. */
8806 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8807 cr = XEXP (condition_rtx, 0);
8808
8809 if (GET_MODE (cr) == CCmode)
8810 emit_insn (gen_isel_signed (dest, condition_rtx,
8811 true_cond, false_cond, cr));
8812 else
8813 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8814 true_cond, false_cond, cr));
8815
8816 return 1;
8817 }
8818
8819 const char *
output_isel(operands)8820 output_isel (operands)
8821 rtx *operands;
8822 {
8823 enum rtx_code code;
8824
8825 code = GET_CODE (operands[1]);
8826 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8827 {
8828 PUT_CODE (operands[1], reverse_condition (code));
8829 return "isel %0,%3,%2,%j1";
8830 }
8831 else
8832 return "isel %0,%2,%3,%j1";
8833 }
8834
8835 void
rs6000_emit_minmax(dest,code,op0,op1)8836 rs6000_emit_minmax (dest, code, op0, op1)
8837 rtx dest;
8838 enum rtx_code code;
8839 rtx op0;
8840 rtx op1;
8841 {
8842 enum machine_mode mode = GET_MODE (op0);
8843 enum rtx_code c;
8844 rtx target;
8845
8846 if (code == SMAX || code == SMIN)
8847 c = GE;
8848 else
8849 c = GEU;
8850
8851 if (code == SMAX || code == UMAX)
8852 target = emit_conditional_move (dest, c, op0, op1, mode,
8853 op0, op1, mode, 0);
8854 else
8855 target = emit_conditional_move (dest, c, op0, op1, mode,
8856 op1, op0, mode, 0);
8857 if (target == NULL_RTX)
8858 abort ();
8859 if (target != dest)
8860 emit_move_insn (dest, target);
8861 }
8862
8863 /* This page contains routines that are used to determine what the
8864 function prologue and epilogue code will do and write them out. */
8865
8866 /* Return the first fixed-point register that is required to be
8867 saved. 32 if none. */
8868
8869 int
first_reg_to_save()8870 first_reg_to_save ()
8871 {
8872 int first_reg;
8873
8874 /* Find lowest numbered live register. */
8875 for (first_reg = 13; first_reg <= 31; first_reg++)
8876 if (regs_ever_live[first_reg]
8877 && (! call_used_regs[first_reg]
8878 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8879 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
8880 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8881 break;
8882
8883 #if TARGET_MACHO
8884 if (flag_pic
8885 && current_function_uses_pic_offset_table
8886 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8887 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8888 #endif
8889
8890 return first_reg;
8891 }
8892
8893 /* Similar, for FP regs. */
8894
8895 int
first_fp_reg_to_save()8896 first_fp_reg_to_save ()
8897 {
8898 int first_reg;
8899
8900 /* Find lowest numbered live register. */
8901 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8902 if (regs_ever_live[first_reg])
8903 break;
8904
8905 return first_reg;
8906 }
8907
8908 /* Similar, for AltiVec regs. */
8909
8910 static int
first_altivec_reg_to_save()8911 first_altivec_reg_to_save ()
8912 {
8913 int i;
8914
8915 /* Stack frame remains as is unless we are in AltiVec ABI. */
8916 if (! TARGET_ALTIVEC_ABI)
8917 return LAST_ALTIVEC_REGNO + 1;
8918
8919 /* Find lowest numbered live register. */
8920 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8921 if (regs_ever_live[i])
8922 break;
8923
8924 return i;
8925 }
8926
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No AltiVec register is live at all: nothing for VRSAVE to cover.  */
  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): this loop counts down with an unsigned index and
     stops once i < ALTIVEC_ARG_MIN_REG; it presumably relies on
     args_info.vregno being at least ALTIVEC_ARG_MIN_REG - 1 so it
     terminates and clears only the argument registers actually used
     -- confirm against how vregno is maintained.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
8975
8976 static void
is_altivec_return_reg(reg,xyes)8977 is_altivec_return_reg (reg, xyes)
8978 rtx reg;
8979 void *xyes;
8980 {
8981 bool *yes = (bool *) xyes;
8982 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8983 *yes = true;
8984 }
8985
8986
8987 /* Calculate the stack information for the current function. This is
8988 complicated by having two separate calling sequences, the AIX calling
8989 sequence and the V.4 calling sequence.
8990
8991 AIX (and Darwin/Mac OS X) stack frames look like:
8992 32-bit 64-bit
8993 SP----> +---------------------------------------+
8994 | back chain to caller | 0 0
8995 +---------------------------------------+
8996 | saved CR | 4 8 (8-11)
8997 +---------------------------------------+
8998 | saved LR | 8 16
8999 +---------------------------------------+
9000 | reserved for compilers | 12 24
9001 +---------------------------------------+
9002 | reserved for binders | 16 32
9003 +---------------------------------------+
9004 | saved TOC pointer | 20 40
9005 +---------------------------------------+
9006 | Parameter save area (P) | 24 48
9007 +---------------------------------------+
9008 | Alloca space (A) | 24+P etc.
9009 +---------------------------------------+
9010 | Local variable space (L) | 24+P+A
9011 +---------------------------------------+
9012 | Float/int conversion temporary (X) | 24+P+A+L
9013 +---------------------------------------+
9014 | Save area for AltiVec registers (W) | 24+P+A+L+X
9015 +---------------------------------------+
9016 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9017 +---------------------------------------+
9018 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9019 +---------------------------------------+
 | Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
 +---------------------------------------+
 | Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9023 +---------------------------------------+
9024 old SP->| back chain to caller's caller |
9025 +---------------------------------------+
9026
9027 The required alignment for AIX configurations is two words (i.e., 8
9028 or 16 bytes).
9029
9030
9031 V.4 stack frames look like:
9032
9033 SP----> +---------------------------------------+
9034 | back chain to caller | 0
9035 +---------------------------------------+
9036 | caller's saved LR | 4
9037 +---------------------------------------+
9038 | Parameter save area (P) | 8
9039 +---------------------------------------+
9040 | Alloca space (A) | 8+P
9041 +---------------------------------------+
9042 | Varargs save area (V) | 8+P+A
9043 +---------------------------------------+
9044 | Local variable space (L) | 8+P+A+V
9045 +---------------------------------------+
9046 | Float/int conversion temporary (X) | 8+P+A+V+L
9047 +---------------------------------------+
9048 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9049 +---------------------------------------+
9050 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9051 +---------------------------------------+
9052 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9053 +---------------------------------------+
9054 | SPE: area for 64-bit GP registers |
9055 +---------------------------------------+
9056 | SPE alignment padding |
9057 +---------------------------------------+
9058 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9059 +---------------------------------------+
9060 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9061 +---------------------------------------+
9062 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9063 +---------------------------------------+
9064 old SP->| back chain to caller's caller |
9065 +---------------------------------------+
9066
9067 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9068 given. (But note below and in sysv4.h that we require only 8 and
9069 may round up the size of our stack frame anyways. The historical
9070 reason is early versions of powerpc-linux which didn't properly
9071 align the stack at program startup. A happy side-effect is that
9072 -mno-eabi libraries can be used with -meabi programs.)
9073
9074 The EABI configuration defaults to the V.4 layout, unless
9075 -mcall-aix is used, in which case the AIX layout is used. However,
9076 the stack alignment requirements may differ. If -mno-eabi is not
9077 given, the required stack alignment is 8 bytes; if -mno-eabi is
9078 given, the required alignment is 16 bytes. (But see V.4 comment
9079 above.) */
9080
9081 #ifndef ABI_STACK_BOUNDARY
9082 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9083 #endif
9084
/* Compute the stack-frame layout for the current function, following
   the frame diagrams in the comment above.  Returns a pointer to a
   static record that is fully recomputed on every call.  */

rs6000_stack_t *
rs6000_stack_info ()
{
  static rs6000_stack_t info, zero_info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  enum rs6000_abi abi;
  int ehrd_size;
  int total_raw_size;

  /* Zero all fields portably.  */
  info = zero_info;

  /* Select which calling sequence.  */
  info_ptr->abi = abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && abi == ABI_V4)
       || (flag_pic && abi == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always.  Anyone overly
     concerned with frame size can fix this.  ;-).

     So... since we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI)
    info_ptr->gp_size = 0;

  /* FP save area: 8 bytes per register from the first live FP reg.  */
  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  /* AltiVec save area: 16 bytes per register.  */
  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if (rs6000_ra_ever_killed ()
      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (abi == ABI_V4 && current_function_calls_alloca)
      || (DEFAULT_ABI == ABI_DARWIN
	  && flag_pic
	  && current_function_uses_pic_offset_table)
      || info_ptr->calls_p)
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (abi == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers; the loop body is empty on purpose.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->varargs_size = RS6000_VARARGS_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
					 8);

  /* Under the SPE ABI the GPRs are saved as full 64-bit values in a
     separate area (see the gp_size = 0 note above).  */
  if (TARGET_SPE_ABI)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size  = 4;
  else
    info_ptr->vrsave_size  = 0;

  /* Calculate the offsets.  Save-area offsets are negative, relative
     to the old stack pointer at the top of the frame.  */
  switch (abi)
    {
    case ABI_NONE:
    default:
      abort ();

    case ABI_AIX:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
      /* CR and LR live at fixed positive offsets in the caller's frame
	 under the AIX/Darwin layout (see the frame diagram above).  */
      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset   = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
	}
      else
	info_ptr->toc_save_offset  = info_ptr->cr_save_offset - info_ptr->toc_size;
      info_ptr->ehrd_offset      = info_ptr->toc_save_offset - ehrd_size;
      info_ptr->lr_save_offset   = reg_size;
      break;
    }

  /* NOTE(review): lr_size, toc_size and toc_save_p are read below but
     never assigned in this function; presumably they are filled in
     elsewhere (or stay zero from the zero_info copy) -- confirm.  */
  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
					 + info_ptr->gp_size
					 + info_ptr->altivec_size
					 + info_ptr->altivec_padding_size
					 + info_ptr->vrsave_size
					 + info_ptr->spe_gp_size
					 + info_ptr->spe_padding_size
					 + ehrd_size
					 + info_ptr->cr_size
					 + info_ptr->lr_size
					 + info_ptr->toc_size,
					 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
					 ? 16 : 8);

  total_raw_size	 = (info_ptr->vars_size
			    + info_ptr->parm_size
			    + info_ptr->save_size
			    + info_ptr->varargs_size
			    + info_ptr->fixed_size);

  info_ptr->total_size =
    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (abi == ABI_V4)
    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;

  else
    info_ptr->push_p = (frame_pointer_needed
			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
			|| ((total_raw_size - info_ptr->fixed_size)
			    > (TARGET_32BIT ? 220 : 288)));

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  if (! info_ptr->toc_save_p)
    info_ptr->toc_save_offset = 0;

  return info_ptr;
}
9370
/* Print to stderr a human-readable dump of the stack frame layout
   computed by rs6000_stack_info for the current function; intended to
   be called by hand from a debugger.  If INFO is null, compute the
   current function's frame information first.  Fields still holding a
   "nothing saved" default value are omitted from the dump.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 is the "no GPR needs saving" sentinel.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  /* 64 is the "no FPR needs saving" sentinel.  */
  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  /* Save-area offsets; zero means that register class is not saved.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Sizes of the various frame components.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  /* reg_size is 4 by default; only the unusual (64-bit) case is shown.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9516
9517 rtx
rs6000_return_addr(count,frame)9518 rs6000_return_addr (count, frame)
9519 int count;
9520 rtx frame;
9521 {
9522 /* Currently we don't optimize very well between prolog and body
9523 code and for PIC code the code can be actually quite bad, so
9524 don't try to be too clever here. */
9525 if (count != 0 || flag_pic != 0)
9526 {
9527 cfun->machine->ra_needs_full_frame = 1;
9528
9529 return
9530 gen_rtx_MEM
9531 (Pmode,
9532 memory_address
9533 (Pmode,
9534 plus_constant (copy_to_reg
9535 (gen_rtx_MEM (Pmode,
9536 memory_address (Pmode, frame))),
9537 RETURN_ADDRESS_OFFSET)));
9538 }
9539
9540 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9541 }
9542
9543 /* Say whether a function is a candidate for sibcall handling or not.
9544 We do not allow indirect calls to be optimized into sibling calls.
9545 Also, we can't do it if there are any vector parameters; there's
9546 nowhere to put the VRsave code so it works; note that functions with
9547 vector parameters are required to have a prototype, so the argument
9548 type info must be available here. (The tail recursion case can work
9549 with vector parameters, but there's no way to distinguish here.) */
9550 int
function_ok_for_sibcall(fndecl)9551 function_ok_for_sibcall (fndecl)
9552 tree fndecl;
9553 {
9554 tree type;
9555 if (fndecl)
9556 {
9557 if (TARGET_ALTIVEC_VRSAVE)
9558 {
9559 for (type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9560 type; type = TREE_CHAIN (type))
9561 {
9562 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9563 return 0;
9564 }
9565 }
9566 if (DEFAULT_ABI == ABI_DARWIN
9567 || (*targetm.binds_local_p) (fndecl))
9568 {
9569 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
9570
9571 if (!lookup_attribute ("longcall", attr_list)
9572 || lookup_attribute ("shortcall", attr_list))
9573 return 1;
9574 }
9575 }
9576 return 0;
9577 }
9578
9579 static int
rs6000_ra_ever_killed()9580 rs6000_ra_ever_killed ()
9581 {
9582 rtx top;
9583 rtx reg;
9584 rtx insn;
9585
9586 /* Irritatingly, there are two kinds of thunks -- those created with
9587 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9588 through the regular part of the compiler. This is a very hacky
9589 way to tell them apart. */
9590 if (current_function_is_thunk && !no_new_pseudos)
9591 return 0;
9592
9593 /* regs_ever_live has LR marked as used if any sibcalls are present,
9594 but this should not force saving and restoring in the
9595 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9596 clobbers LR, so that is inappropriate. */
9597
9598 /* Also, the prologue can generate a store into LR that
9599 doesn't really count, like this:
9600
9601 move LR->R0
9602 bcl to set PIC register
9603 move LR->R31
9604 move R0->LR
9605
9606 When we're called from the epilogue, we need to avoid counting
9607 this as a store. */
9608
9609 push_topmost_sequence ();
9610 top = get_insns ();
9611 pop_topmost_sequence ();
9612 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9613
9614 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9615 {
9616 if (INSN_P (insn))
9617 {
9618 if (FIND_REG_INC_NOTE (insn, reg))
9619 return 1;
9620 else if (GET_CODE (insn) == CALL_INSN
9621 && !SIBLING_CALL_P (insn))
9622 return 1;
9623 else if (set_of (reg, insn) != NULL_RTX
9624 && !prologue_epilogue_contains (insn))
9625 return 1;
9626 }
9627 }
9628 return 0;
9629 }
9630
9631 /* Add a REG_MAYBE_DEAD note to the insn. */
9632 static void
rs6000_maybe_dead(insn)9633 rs6000_maybe_dead (insn)
9634 rtx insn;
9635 {
9636 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9637 const0_rtx,
9638 REG_NOTES (insn));
9639 }
9640
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  FROMPROLOG is nonzero when this
   is called while emitting the prologue, in which case the emitted
   insns are marked with REG_MAYBE_DEAD notes (via rs6000_maybe_dead)
   and the link register / r0 may be used as scratch.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  /* DEST is the fixed PIC/TOC base register.  */
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      /* SVR4 small-model PIC (-fpic): load the table address into a
	 temporary and copy it to DEST.  In the prologue the link
	 register may be used directly; elsewhere a fresh pseudo is
	 needed.  */
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      /* SVR4 -fPIC: compute the TOC base relative to an internal
	 label.  */
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* Use the per-function ...LCF / ...LCL label pair; all insns
	     are marked REG_MAYBE_DEAD since the TOC pointer may go
	     unused.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Reloading the TOC outside the prologue: go through the TOC
	     symbol itself, with a fresh ...LCG label for each reload.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* NOTE(review): gen_addsi3 assumes Pmode == SImode; this branch
	 appears reachable only for 32-bit ELF -- confirm.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  Build the
	 address of the ...LCTOC1 anchor from its high and low parts.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: use the load_toc_aix_{si,di} pattern for the current
	 word size.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
9744
/* Return the alias set used for TOC references, allocating it on
   first use.  */
int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9753
9754 /* This retuns nonzero if the current function uses the TOC. This is
9755 determined by the presence of (unspec ... 7), which is generated by
9756 the various load_toc_* patterns. */
9757
9758 int
uses_TOC()9759 uses_TOC ()
9760 {
9761 rtx insn;
9762
9763 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9764 if (INSN_P (insn))
9765 {
9766 rtx pat = PATTERN (insn);
9767 int i;
9768
9769 if (GET_CODE (pat) == PARALLEL)
9770 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9771 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9772 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9773 return 1;
9774 }
9775 return 0;
9776 }
9777
9778 rtx
create_TOC_reference(symbol)9779 create_TOC_reference (symbol)
9780 rtx symbol;
9781 {
9782 return gen_rtx_PLUS (Pmode,
9783 gen_rtx_REG (Pmode, TOC_REGISTER),
9784 gen_rtx_CONST (Pmode,
9785 gen_rtx_MINUS (Pmode, symbol,
9786 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9787 }
9788
9789 #if TARGET_AIX
9790 /* __throw will restore its own return address to be the same as the
9791 return address of the function that the throw is being made to.
9792 This is unfortunate, because we want to check the original
9793 return address to see if we need to restore the TOC.
9794 So we have to squirrel it away here.
9795 This is used only in compiling __throw and __rethrow.
9796
9797 Most of this code should be removed by CSE. */
9798 static rtx insn_after_throw;
9799
9800 /* This does the saving... */
9801 void
rs6000_aix_emit_builtin_unwind_init()9802 rs6000_aix_emit_builtin_unwind_init ()
9803 {
9804 rtx mem;
9805 rtx stack_top = gen_reg_rtx (Pmode);
9806 rtx opcode_addr = gen_reg_rtx (Pmode);
9807
9808 insn_after_throw = gen_reg_rtx (SImode);
9809
9810 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9811 emit_move_insn (stack_top, mem);
9812
9813 mem = gen_rtx_MEM (Pmode,
9814 gen_rtx_PLUS (Pmode, stack_top,
9815 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9816 emit_move_insn (opcode_addr, mem);
9817 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9818 }
9819
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
   bl somefunction-trampoline
   lwz r2,20(sp)

   and later,
   somefunction-trampoline:
   stw r2,20(sp)
   ... load function address in the count register ...
   bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
   bl somefunction
   nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
   bl somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our caller's frame: the back chain word at the
     frame pointer.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk ends once STACKSIZE bytes of frames have been popped.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* Opcode of the TOC-restoring load placed after a cross-module call:
     "lwz r2,20(r1)" (0x80410014) in 32-bit mode, "ld r2,40(r1)"
     (0xE8410028) in 64-bit mode.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* insn_after_throw must have been set up by
     rs6000_aix_emit_builtin_unwind_init.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the instruction at this frame's return address is not the
     TOC-restoring load, that call did not save the TOC: skip the
     restore for this frame.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from this frame's TOC save slot (offset 20 or 40,
     matching the load compared against above).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Move one frame up the back chain ...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... load that frame's saved return address, and fetch the
     instruction word found there for the next iteration's compare.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9917 #endif /* TARGET_AIX */
9918
9919 /* This ties together stack memory (MEM with an alias set of
9920 rs6000_sr_alias_set) and the change to the stack pointer. */
9921
9922 static void
rs6000_emit_stack_tie()9923 rs6000_emit_stack_tie ()
9924 {
9925 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9926
9927 set_mem_alias_set (mem, rs6000_sr_alias_set);
9928 emit_insn (gen_stack_tie (mem));
9929 }
9930
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit-register / -fstack-limit-symbol, emit a trap
     that fires when the new stack pointer would fall below the
     limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit in a GPR: trap if sp < limit + size, i.e. the
	     decremented stack pointer would be below the limit.  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit given as a symbol: materialize limit + size in
	     tmp_reg from its high and low parts, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 when requested, or when we must
     store the back chain by hand below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Decrement too large for an immediate operand; load it into
	     r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Single update insn: decrement sp and store the old sp (back
	 chain) at the new stack top.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: adjust sp, then store the r12 copy of the old
	 sp as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Record sp = sp - size for the unwind machinery, regardless of the
     exact insn used above.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10020
10021 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10022 knows that:
10023
10024 (mem (plus (blah) (regXX)))
10025
10026 is really:
10027
10028 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10029
10030 static void
altivec_frame_fixup(insn,reg,val)10031 altivec_frame_fixup (insn, reg, val)
10032 rtx insn, reg;
10033 HOST_WIDE_INT val;
10034 {
10035 rtx real;
10036
10037 real = copy_rtx (PATTERN (insn));
10038
10039 real = replace_rtx (real, reg, GEN_INT (val));
10040
10041 RTX_FRAME_RELATED_P (insn) = 1;
10042 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10043 real,
10044 REG_NOTES (insn));
10045 }
10046
10047 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10048 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10049 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10050 deduce these equivalences by itself so it wasn't necessary to hold
10051 its hand so much. */
10052
10053 static void
rs6000_frame_related(insn,reg,val,reg2,rreg)10054 rs6000_frame_related (insn, reg, val, reg2, rreg)
10055 rtx insn;
10056 rtx reg;
10057 HOST_WIDE_INT val;
10058 rtx reg2;
10059 rtx rreg;
10060 {
10061 rtx real, temp;
10062
10063 /* copy_rtx will not make unique copies of registers, so we need to
10064 ensure we don't have unwanted sharing here. */
10065 if (reg == reg2)
10066 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10067
10068 if (reg == rreg)
10069 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10070
10071 real = copy_rtx (PATTERN (insn));
10072
10073 if (reg2 != NULL_RTX)
10074 real = replace_rtx (real, reg2, rreg);
10075
10076 real = replace_rtx (real, reg,
10077 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10078 STACK_POINTER_REGNUM),
10079 GEN_INT (val)));
10080
10081 /* We expect that 'real' is either a SET or a PARALLEL containing
10082 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10083 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10084
10085 if (GET_CODE (real) == SET)
10086 {
10087 rtx set = real;
10088
10089 temp = simplify_rtx (SET_SRC (set));
10090 if (temp)
10091 SET_SRC (set) = temp;
10092 temp = simplify_rtx (SET_DEST (set));
10093 if (temp)
10094 SET_DEST (set) = temp;
10095 if (GET_CODE (SET_DEST (set)) == MEM)
10096 {
10097 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10098 if (temp)
10099 XEXP (SET_DEST (set), 0) = temp;
10100 }
10101 }
10102 else if (GET_CODE (real) == PARALLEL)
10103 {
10104 int i;
10105 for (i = 0; i < XVECLEN (real, 0); i++)
10106 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10107 {
10108 rtx set = XVECEXP (real, 0, i);
10109
10110 temp = simplify_rtx (SET_SRC (set));
10111 if (temp)
10112 SET_SRC (set) = temp;
10113 temp = simplify_rtx (SET_DEST (set));
10114 if (temp)
10115 SET_DEST (set) = temp;
10116 if (GET_CODE (SET_DEST (set)) == MEM)
10117 {
10118 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10119 if (temp)
10120 XEXP (SET_DEST (set), 0) = temp;
10121 }
10122 RTX_FRAME_RELATED_P (set) = 1;
10123 }
10124 }
10125 else
10126 abort ();
10127
10128 RTX_FRAME_RELATED_P (insn) = 1;
10129 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10130 real,
10131 REG_NOTES (insn));
10132 }
10133
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.  REG holds the new VRSAVE value; INFO is the
   current frame info; EPILOGUEP is nonzero when the insn is for the
   epilogue.  */

static rtx
generate_set_vrsave (reg, info, epiloguep)
     rtx reg;
     rs6000_stack_t *info;
     int epiloguep;
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  /* The VRSAVE update itself: unspec_volatile 30 of REG and the old
     VRSAVE value, stored back into VRSAVE.  */
  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    30));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

     (set (reg 999) (mem))
     (parallel [ (set (reg vrsave) (unspec blah))
     (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  /* NOTE(review): ALTIVEC_REG_BIT (i) is presumably nonzero for every
     regno in this range, so the condition below degenerates to
     vrsave_mask != 0 and clobbers *every* AltiVec register; the
     comment above suggests `info->vrsave_mask & ALTIVEC_REG_BIT (i)'
     was intended.  Over-clobbering is merely conservative, so the
     behavior is left unchanged -- confirm before changing.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
      {
	if (!epiloguep || call_used_regs [i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    /* Call-saved register in the epilogue: emit the unspec
	       use/set (number 27) described above instead of a plain
	       clobber.  */
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  /* Gather the set and all clobbers into one PARALLEL.  */
  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
10195
10196 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10197 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10198
10199 static void
emit_frame_save(frame_reg,frame_ptr,mode,regno,offset,total_size)10200 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10201 rtx frame_reg;
10202 rtx frame_ptr;
10203 enum machine_mode mode;
10204 unsigned int regno;
10205 int offset;
10206 int total_size;
10207 {
10208 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10209 rtx replacea, replaceb;
10210
10211 int_rtx = GEN_INT (offset);
10212
10213 /* Some cases that need register indexed addressing. */
10214 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10215 || (TARGET_SPE_ABI
10216 && SPE_VECTOR_MODE (mode)
10217 && !SPE_CONST_OFFSET_OK (offset)))
10218 {
10219 /* Whomever calls us must make sure r11 is available in the
10220 flow path of instructions in the prologue. */
10221 offset_rtx = gen_rtx_REG (Pmode, 11);
10222 emit_move_insn (offset_rtx, int_rtx);
10223
10224 replacea = offset_rtx;
10225 replaceb = int_rtx;
10226 }
10227 else
10228 {
10229 offset_rtx = int_rtx;
10230 replacea = NULL_RTX;
10231 replaceb = NULL_RTX;
10232 }
10233
10234 reg = gen_rtx_REG (mode, regno);
10235 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10236 mem = gen_rtx_MEM (mode, addr);
10237 set_mem_alias_set (mem, rs6000_sr_alias_set);
10238
10239 insn = emit_move_insn (mem, reg);
10240
10241 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10242 }
10243
10244 /* Emit an offset memory reference suitable for a frame store, while
10245 converting to a valid addressing mode. */
10246
10247 static rtx
gen_frame_mem_offset(mode,reg,offset)10248 gen_frame_mem_offset (mode, reg, offset)
10249 enum machine_mode mode;
10250 rtx reg;
10251 int offset;
10252 {
10253 rtx int_rtx, offset_rtx;
10254
10255 int_rtx = GEN_INT (offset);
10256
10257 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10258 {
10259 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10260 emit_move_insn (offset_rtx, int_rtx);
10261 }
10262 else
10263 offset_rtx = int_rtx;
10264
10265 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10266 }
10267
10268 /* Emit function prologue as insns. */
10269
10270 void
rs6000_emit_prologue()10271 rs6000_emit_prologue ()
10272 {
10273 rs6000_stack_t *info = rs6000_stack_info ();
10274 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10275 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10276 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10277 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10278 rtx frame_reg_rtx = sp_reg_rtx;
10279 rtx cr_save_rtx = NULL;
10280 rtx insn;
10281 int saving_FPRs_inline;
10282 int using_store_multiple;
10283 HOST_WIDE_INT sp_offset = 0;
10284
10285 if (warn_stack_larger_than && info->vars_size > stack_larger_than_size)
10286 warning ("stack usage is %d bytes", info->vars_size);
10287
10288 if (TARGET_SPE_ABI)
10289 {
10290 reg_mode = V2SImode;
10291 reg_size = 8;
10292 }
10293
10294 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10295 && !TARGET_SPE_ABI
10296 && info->first_gp_reg_save < 31);
10297 saving_FPRs_inline = (info->first_fp_reg_save == 64
10298 || FP_SAVE_INLINE (info->first_fp_reg_save));
10299
10300 /* For V.4, update stack before we do any saving and set back pointer. */
10301 if (info->push_p && DEFAULT_ABI == ABI_V4)
10302 {
10303 if (info->total_size < 32767)
10304 sp_offset = info->total_size;
10305 else
10306 frame_reg_rtx = frame_ptr_rtx;
10307 rs6000_emit_allocate_stack (info->total_size,
10308 (frame_reg_rtx != sp_reg_rtx
10309 && (info->cr_save_p
10310 || info->lr_save_p
10311 || info->first_fp_reg_save < 64
10312 || info->first_gp_reg_save < 32
10313 )));
10314 if (frame_reg_rtx != sp_reg_rtx)
10315 rs6000_emit_stack_tie ();
10316 }
10317
10318 /* Save AltiVec registers if needed. */
10319 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10320 {
10321 int i;
10322
10323 /* There should be a non inline version of this, for when we
10324 are saving lots of vector registers. */
10325 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10326 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10327 {
10328 rtx areg, savereg, mem;
10329 int offset;
10330
10331 offset = info->altivec_save_offset + sp_offset
10332 + 16 * (i - info->first_altivec_reg_save);
10333
10334 savereg = gen_rtx_REG (V4SImode, i);
10335
10336 areg = gen_rtx_REG (Pmode, 0);
10337 emit_move_insn (areg, GEN_INT (offset));
10338
10339 /* AltiVec addressing mode is [reg+reg]. */
10340 mem = gen_rtx_MEM (V4SImode,
10341 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10342
10343 set_mem_alias_set (mem, rs6000_sr_alias_set);
10344
10345 insn = emit_move_insn (mem, savereg);
10346
10347 altivec_frame_fixup (insn, areg, offset);
10348 }
10349 }
10350
10351 /* VRSAVE is a bit vector representing which AltiVec registers
10352 are used. The OS uses this to determine which vector
10353 registers to save on a context switch. We need to save
10354 VRSAVE on the stack frame, add whatever AltiVec registers we
10355 used in this function, and do the corresponding magic in the
10356 epilogue. */
10357
10358 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
10359 && info->vrsave_mask != 0)
10360 {
10361 rtx reg, mem, vrsave;
10362 int offset;
10363
10364 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
10365 as frame_reg_rtx and r11 as the static chain pointer for
10366 nested functions. */
10367 reg = gen_rtx_REG (SImode, 0);
10368 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10369 if (TARGET_MACHO)
10370 emit_insn (gen_get_vrsave_internal (reg));
10371 else
10372 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10373
10374 /* Save VRSAVE. */
10375 offset = info->vrsave_save_offset + sp_offset;
10376 mem
10377 = gen_rtx_MEM (SImode,
10378 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10379 set_mem_alias_set (mem, rs6000_sr_alias_set);
10380 insn = emit_move_insn (mem, reg);
10381
10382 /* Include the registers in the mask. */
10383 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10384
10385 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10386 }
10387
10388 /* If we use the link register, get it into r0. */
10389 if (info->lr_save_p)
10390 emit_move_insn (gen_rtx_REG (Pmode, 0),
10391 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10392
10393 /* If we need to save CR, put it into r12. */
10394 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10395 {
10396 cr_save_rtx = gen_rtx_REG (SImode, 12);
10397 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10398 }
10399
10400 /* Do any required saving of fpr's. If only one or two to save, do
10401 it ourselves. Otherwise, call function. */
10402 if (saving_FPRs_inline)
10403 {
10404 int i;
10405 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10406 if ((regs_ever_live[info->first_fp_reg_save+i]
10407 && ! call_used_regs[info->first_fp_reg_save+i]))
10408 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10409 info->first_fp_reg_save + i,
10410 info->fp_save_offset + sp_offset + 8 * i,
10411 info->total_size);
10412 }
10413 else if (info->first_fp_reg_save != 64)
10414 {
10415 int i;
10416 char rname[30];
10417 const char *alloc_rname;
10418 rtvec p;
10419 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10420
10421 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10422 gen_rtx_REG (Pmode,
10423 LINK_REGISTER_REGNUM));
10424 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10425 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10426 alloc_rname = ggc_strdup (rname);
10427 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10428 gen_rtx_SYMBOL_REF (Pmode,
10429 alloc_rname));
10430 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10431 {
10432 rtx addr, reg, mem;
10433 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10434 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10435 GEN_INT (info->fp_save_offset
10436 + sp_offset + 8*i));
10437 mem = gen_rtx_MEM (DFmode, addr);
10438 set_mem_alias_set (mem, rs6000_sr_alias_set);
10439
10440 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10441 }
10442 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10443 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10444 NULL_RTX, NULL_RTX);
10445 }
10446
10447 /* Save GPRs. This is done as a PARALLEL if we are using
10448 the store-multiple instructions. */
10449 if (using_store_multiple)
10450 {
10451 rtvec p;
10452 int i;
10453 p = rtvec_alloc (32 - info->first_gp_reg_save);
10454 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10455 {
10456 rtx addr, reg, mem;
10457 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10458 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10459 GEN_INT (info->gp_save_offset
10460 + sp_offset
10461 + reg_size * i));
10462 mem = gen_rtx_MEM (reg_mode, addr);
10463 set_mem_alias_set (mem, rs6000_sr_alias_set);
10464
10465 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10466 }
10467 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10468 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10469 NULL_RTX, NULL_RTX);
10470 }
10471 else
10472 {
10473 int i;
10474 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10475 if ((regs_ever_live[info->first_gp_reg_save+i]
10476 && ! call_used_regs[info->first_gp_reg_save+i])
10477 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10478 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10479 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10480 {
10481 rtx addr, reg, mem;
10482 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10483
10484 if (TARGET_SPE_ABI)
10485 {
10486 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10487 rtx b;
10488
10489 if (!SPE_CONST_OFFSET_OK (offset))
10490 {
10491 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10492 emit_move_insn (b, GEN_INT (offset));
10493 }
10494 else
10495 b = GEN_INT (offset);
10496
10497 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10498 mem = gen_rtx_MEM (V2SImode, addr);
10499 set_mem_alias_set (mem, rs6000_sr_alias_set);
10500 insn = emit_move_insn (mem, reg);
10501
10502 if (GET_CODE (b) == CONST_INT)
10503 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10504 NULL_RTX, NULL_RTX);
10505 else
10506 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10507 b, GEN_INT (offset));
10508 }
10509 else
10510 {
10511 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10512 GEN_INT (info->gp_save_offset
10513 + sp_offset
10514 + reg_size * i));
10515 mem = gen_rtx_MEM (reg_mode, addr);
10516 set_mem_alias_set (mem, rs6000_sr_alias_set);
10517
10518 insn = emit_move_insn (mem, reg);
10519 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10520 NULL_RTX, NULL_RTX);
10521 }
10522 }
10523 }
10524
10525 /* ??? There's no need to emit actual instructions here, but it's the
10526 easiest way to get the frame unwind information emitted. */
10527 if (current_function_calls_eh_return)
10528 {
10529 unsigned int i, regno;
10530
10531 for (i = 0; ; ++i)
10532 {
10533 regno = EH_RETURN_DATA_REGNO (i);
10534 if (regno == INVALID_REGNUM)
10535 break;
10536
10537 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10538 info->ehrd_offset + sp_offset
10539 + reg_size * (int) i,
10540 info->total_size);
10541 }
10542 }
10543
10544 /* Save lr if we used it. */
10545 if (info->lr_save_p)
10546 {
10547 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10548 GEN_INT (info->lr_save_offset + sp_offset));
10549 rtx reg = gen_rtx_REG (Pmode, 0);
10550 rtx mem = gen_rtx_MEM (Pmode, addr);
10551 /* This should not be of rs6000_sr_alias_set, because of
10552 __builtin_return_address. */
10553
10554 insn = emit_move_insn (mem, reg);
10555 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10556 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10557 }
10558
10559 /* Save CR if we use any that must be preserved. */
10560 if (info->cr_save_p)
10561 {
10562 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10563 GEN_INT (info->cr_save_offset + sp_offset));
10564 rtx mem = gen_rtx_MEM (SImode, addr);
10565
10566 set_mem_alias_set (mem, rs6000_sr_alias_set);
10567
10568 /* If r12 was used to hold the original sp, copy cr into r0 now
10569 that it's free. */
10570 if (REGNO (frame_reg_rtx) == 12)
10571 {
10572 cr_save_rtx = gen_rtx_REG (SImode, 0);
10573 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10574 }
10575 insn = emit_move_insn (mem, cr_save_rtx);
10576
10577 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10578 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10579 OK. All we have to do is specify that _one_ condition code
10580 register is saved in this stack slot. The thrower's epilogue
10581 will then restore all the call-saved registers.
10582 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10583 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10584 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10585 }
10586
10587 /* Update stack and set back pointer unless this is V.4,
10588 for which it was done previously. */
10589 if (info->push_p && DEFAULT_ABI != ABI_V4)
10590 rs6000_emit_allocate_stack (info->total_size, FALSE);
10591
10592 /* Set frame pointer, if needed. */
10593 if (frame_pointer_needed)
10594 {
10595 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10596 sp_reg_rtx);
10597 RTX_FRAME_RELATED_P (insn) = 1;
10598 }
10599
10600 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10601 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10602 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10603 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10604 {
10605 /* If emit_load_toc_table will use the link register, we need to save
10606 it. We use R11 for this purpose because emit_load_toc_table
10607 can use register 0. This allows us to use a plain 'blr' to return
10608 from the procedure more often. */
10609 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10610 && ! info->lr_save_p
10611 && EXIT_BLOCK_PTR->pred != NULL);
10612 if (save_LR_around_toc_setup)
10613 emit_move_insn (gen_rtx_REG (Pmode, 11),
10614 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10615
10616 rs6000_emit_load_toc_table (TRUE);
10617
10618 if (save_LR_around_toc_setup)
10619 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10620 gen_rtx_REG (Pmode, 11));
10621 }
10622
10623 #if TARGET_MACHO
10624 if (DEFAULT_ABI == ABI_DARWIN
10625 && flag_pic && current_function_uses_pic_offset_table)
10626 {
10627 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10628 char *picbase = machopic_function_base_name ();
10629 rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));
10630
10631 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10632
10633 rs6000_maybe_dead (
10634 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10635 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10636 }
10637 #endif
10638 }
10639
10640 /* Write function prologue. */
10641
10642 static void
rs6000_output_function_prologue(file,size)10643 rs6000_output_function_prologue (file, size)
10644 FILE *file;
10645 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10646 {
10647 rs6000_stack_t *info = rs6000_stack_info ();
10648
10649 if (TARGET_DEBUG_STACK)
10650 debug_stack_info (info);
10651
10652 /* Write .extern for any function we will call to save and restore
10653 fp values. */
10654 if (info->first_fp_reg_save < 64
10655 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10656 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10657 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10658 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10659 RESTORE_FP_SUFFIX);
10660
10661 /* Write .extern for AIX common mode routines, if needed. */
10662 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10663 {
10664 fputs ("\t.extern __mulh\n", file);
10665 fputs ("\t.extern __mull\n", file);
10666 fputs ("\t.extern __divss\n", file);
10667 fputs ("\t.extern __divus\n", file);
10668 fputs ("\t.extern __quoss\n", file);
10669 fputs ("\t.extern __quous\n", file);
10670 common_mode_defined = 1;
10671 }
10672
10673 if (! HAVE_prologue)
10674 {
10675 start_sequence ();
10676
10677 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10678 the "toplevel" insn chain. */
10679 emit_note (0, NOTE_INSN_DELETED);
10680 rs6000_emit_prologue ();
10681 emit_note (0, NOTE_INSN_DELETED);
10682
10683 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10684 {
10685 rtx insn;
10686 unsigned addr = 0;
10687 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10688 {
10689 INSN_ADDRESSES_NEW (insn, addr);
10690 addr += 4;
10691 }
10692 }
10693
10694 if (TARGET_DEBUG_STACK)
10695 debug_rtx_list (get_insns (), 100);
10696 final (get_insns (), file, FALSE, FALSE);
10697 end_sequence ();
10698 }
10699
10700 rs6000_pic_labelno++;
10701 }
10702
10703 /* Emit function epilogue as insns.
10704
10705 At present, dwarf2out_frame_debug_expr doesn't understand
10706 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10707 anywhere in the epilogue. Most of the insns below would in any case
10708 need special notes to explain where r11 is in relation to the stack. */
10709
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  /* SIBCALL is nonzero when this epilogue precedes a sibling call; in
     that case no return insn is emitted here (see the !sibcall block
     at the end).  */
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* Under the SPE ABI, GPRs are restored as 64-bit (V2SImode)
     quantities.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  info = rs6000_stack_info ();
  /* Load-multiple handles only 32-bit registers, and is not used for
     the SPE ABI's 64-bit restores.  */
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  /* For a sibcall or eh_return we cannot branch to the out-of-line FP
     restore routine, since it would return to our caller.  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* NOTE(review): presumably these CPUs favor a single mfcr over
     several movsi_to_cr_one insns -- confirm against scheduling
     descriptions.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	/* V.4 deallocates the frame only after the restores below;
	   until then address saved registers at sp + total_size.  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* Load the slot offset into r0; each vector slot is 16
	       bytes.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore one by one: each live call-saved GPR, plus the
       PIC offset table register when the ABI made us save it.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE load insns take only a small constant offset; use a
		 scratch register when it doesn't fit.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      /* The prologue's epilogue counterpart loaded the saved CR word
	 into r12 above.  */
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p implies at least one CR field is live.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* Restore all live CR fields in one insn (PARALLEL of
	     UNSPEC 20 sets, one per field).  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Move each live CR field from r12 individually.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the dynamic stack adjustment demanded by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return (or the branch to the out-of-line FP restore
     routine, which returns to our caller) unless this epilogue feeds a
     sibling call.  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11074
11075 /* Write function epilogue. */
11076
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  /* If there is no epilogue pattern, emit the epilogue as RTL and run
     final () over it directly.  */
  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 || GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

#if TARGET_OBJECT_FORMAT == OBJECT_MACHO
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
	   && NOTE_P (insn)
	   && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
	&& (LABEL_P (insn)
	    || (NOTE_P (insn)
		&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      /* Nonzero when the variable-length optional fields (parameter
	 info, function name, etc.) are emitted after the fixed part.  */
      int optional_tbtab;

      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
 	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	/* NOTE(review): a front end whose name is not listed above
	   aborts the compiler here.  */
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11365
11366 /* A C compound statement that outputs the assembler code for a thunk
11367 function, used to implement C++ virtual function calls with
11368 multiple inheritance. The thunk acts as a wrapper around a virtual
11369 function, adjusting the implicit object parameter before handing
11370 control off to the real function.
11371
11372 First, emit code to add the integer DELTA to the location that
11373 contains the incoming first argument. Assume that this argument
11374 contains a pointer, and is the one used to pass the `this' pointer
11375 in C++. This is the incoming argument *before* the function
11376 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11377 values of all other incoming arguments.
11378
11379 After the addition, emit code to jump to FUNCTION, which is a
11380 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11381 not touch the return address. Hence returning from FUNCTION will
11382 return to whoever called the current `thunk'.
11383
11384 The effect must be as if FUNCTION had been called directly with the
11385 adjusted first argument. This macro is responsible for emitting
11386 all of the code for a thunk function; output_function_prologue()
11387 and output_function_epilogue() are not invoked.
11388
11389 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11390 been extracted from it.) It might possibly be useful on some
11391 targets, but probably not.
11392
11393 If you do not define this macro, the target-independent code in the
11394 C++ frontend will generate a less efficient heavyweight thunk that
11395 calls FUNCTION instead of jumping to it. The generic approach does
11396 not support varargs. */
11397
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
     tree function;
{
  /* The `this' pointer arrives in r3, unless FUNCTION returns an
     aggregate by invisible reference -- then r3 holds the return slot
     and `this' is in r4.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0 = reg_names[0];
  const char *toc = reg_names[2];         /* TOC pointer (r2).  */
  const char *schain = reg_names[11];     /* static chain register.  */
  const char *r12 = reg_names[12];
  char buf[512];
  static int labelno = 0;                 /* uniquifies Lthunk TOC labels.  */

  /* Emit the `this += delta' adjustment, choosing the shortest
     instruction sequence that can materialize DELTA.  */

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, (int) delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, (int) delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 (int) (delta >> 16));

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  AIX function descriptors
     put a `.' before the code symbol; the other ABIs use none.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  /* A "longcall" attribute forbids the direct branch unless overridden
     by "shortcall".  */
  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  function_section (current_function_decl);

	  /* With a minimal TOC, first reload the TOC anchor into r12.  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);

	  /* Load the function descriptor address into r12 ...  */
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);

	  /* ... then unpack the descriptor: entry point (word 0),
	     callee TOC (word 1), environment/static chain (word 2),
	     and branch through the count register.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  /* Branch through the PLT when generating PIC code.  */
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  /* Branch via a Mach-O stub unless the name is already bound.  */
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
11557
11558 /* A quick summary of the various types of 'constant-pool tables'
11559 under PowerPC:
11560
11561 Target Flags Name One table per
11562 AIX (none) AIX TOC object file
11563 AIX -mfull-toc AIX TOC object file
11564 AIX -mminimal-toc AIX minimal TOC translation unit
11565 SVR4/EABI (none) SVR4 SDATA object file
11566 SVR4/EABI -fpic SVR4 pic object file
11567 SVR4/EABI -fPIC SVR4 PIC translation unit
11568 SVR4/EABI -mrelocatable EABI TOC function
11569 SVR4/EABI -maix AIX TOC object file
11570 SVR4/EABI -maix -mminimal-toc
11571 AIX minimal TOC translation unit
11572
11573 Name Reg. Set by entries contains:
11574 made by addrs? fp? sum?
11575
11576 AIX TOC 2 crt0 as Y option option
11577 AIX minimal TOC 30 prolog gcc Y Y option
11578 SVR4 SDATA 13 crt0 gcc N Y N
11579 SVR4 pic 30 prolog ld Y not yet N
11580 SVR4 PIC 30 prolog gcc Y option option
11581 EABI TOC 30 prolog gcc Y option option
11582
11583 */
11584
11585 /* Hash table stuff for keeping track of TOC entries. */
11586
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant was requested in; part of the lookup key, since
     the same RTL may need distinct TOC entries in different modes.  */
  enum machine_mode key_mode;
  /* Number of the .LC label emitted for this entry.  */
  int labelno;
};

/* Maps (key, key_mode) to the label of an already-emitted TOC entry,
   so output_toc can avoid emitting duplicates.  */
static htab_t toc_hash_table;
11597
11598 /* Hash functions for the hash table. */
11599
11600 static unsigned
rs6000_hash_constant(k)11601 rs6000_hash_constant (k)
11602 rtx k;
11603 {
11604 enum rtx_code code = GET_CODE (k);
11605 enum machine_mode mode = GET_MODE (k);
11606 unsigned result = (code << 3) ^ mode;
11607 const char *format;
11608 int flen, fidx;
11609
11610 format = GET_RTX_FORMAT (code);
11611 flen = strlen (format);
11612 fidx = 0;
11613
11614 switch (code)
11615 {
11616 case LABEL_REF:
11617 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11618
11619 case CONST_DOUBLE:
11620 if (mode != VOIDmode)
11621 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11622 flen = 2;
11623 break;
11624
11625 case CODE_LABEL:
11626 fidx = 3;
11627 break;
11628
11629 default:
11630 break;
11631 }
11632
11633 for (; fidx < flen; fidx++)
11634 switch (format[fidx])
11635 {
11636 case 's':
11637 {
11638 unsigned i, len;
11639 const char *str = XSTR (k, fidx);
11640 len = strlen (str);
11641 result = result * 613 + len;
11642 for (i = 0; i < len; i++)
11643 result = result * 613 + (unsigned) str[i];
11644 break;
11645 }
11646 case 'u':
11647 case 'e':
11648 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11649 break;
11650 case 'i':
11651 case 'n':
11652 result = result * 613 + (unsigned) XINT (k, fidx);
11653 break;
11654 case 'w':
11655 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11656 result = result * 613 + (unsigned) XWINT (k, fidx);
11657 else
11658 {
11659 size_t i;
11660 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11661 result = result * 613 + (unsigned) (XWINT (k, fidx)
11662 >> CHAR_BIT * i);
11663 }
11664 break;
11665 default:
11666 abort ();
11667 }
11668
11669 return result;
11670 }
11671
11672 static unsigned
toc_hash_function(hash_entry)11673 toc_hash_function (hash_entry)
11674 const void * hash_entry;
11675 {
11676 const struct toc_hash_struct *thc =
11677 (const struct toc_hash_struct *) hash_entry;
11678 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11679 }
11680
11681 /* Compare H1 and H2 for equivalence. */
11682
11683 static int
toc_hash_eq(h1,h2)11684 toc_hash_eq (h1, h2)
11685 const void * h1;
11686 const void * h2;
11687 {
11688 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11689 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11690
11691 if (((const struct toc_hash_struct *) h1)->key_mode
11692 != ((const struct toc_hash_struct *) h2)->key_mode)
11693 return 0;
11694
11695 return rtx_equal_p (r1, r2);
11696 }
11697
11698 /* Mark the hash table-entry HASH_ENTRY. */
11699
11700 static int
toc_hash_mark_entry(hash_slot,unused)11701 toc_hash_mark_entry (hash_slot, unused)
11702 void ** hash_slot;
11703 void * unused ATTRIBUTE_UNUSED;
11704 {
11705 const struct toc_hash_struct * hash_entry =
11706 *(const struct toc_hash_struct **) hash_slot;
11707 rtx r = hash_entry->key;
11708 ggc_set_mark (hash_entry);
11709 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11710 if (GET_CODE (r) == LABEL_REF)
11711 {
11712 ggc_set_mark (r);
11713 ggc_set_mark (XEXP (r, 0));
11714 }
11715 else
11716 ggc_mark_rtx (r);
11717 return 1;
11718 }
11719
11720 /* Mark all the elements of the TOC hash-table *HT. */
11721
11722 static void
toc_hash_mark_table(vht)11723 toc_hash_mark_table (vht)
11724 void *vht;
11725 {
11726 htab_t *ht = vht;
11727
11728 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11729 }
11730
11731 /* These are the names given by the C++ front-end to vtables, and
11732 vtable-like objects. Ideally, this logic should not be here;
11733 instead, there should be some programmatic way of inquiring as
11734 to whether or not an object is a vtable. */
11735
/* Nonzero if NAME is the assembler name of a C++ vtable or vtable-like
   object (old `_vt.' mangling or Itanium-ABI _ZTV/_ZTT/_ZTC prefixes).
   Note: the macro parameter is now actually used; the previous body
   referred to a lowercase `name' captured from the caller's scope,
   which only worked because every call site had a local by that name.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11741
11742 void
rs6000_output_symbol_ref(file,x)11743 rs6000_output_symbol_ref (file, x)
11744 FILE *file;
11745 rtx x;
11746 {
11747 /* Currently C++ toc references to vtables can be emitted before it
11748 is decided whether the vtable is public or private. If this is
11749 the case, then the linker will eventually complain that there is
11750 a reference to an unknown section. Thus, for vtables only,
11751 we emit the TOC reference to reference the symbol and not the
11752 section. */
11753 const char *name = XSTR (x, 0);
11754
11755 if (VTABLE_NAME_P (name))
11756 {
11757 RS6000_OUTPUT_BASENAME (file, name);
11758 }
11759 else
11760 assemble_name (file, name);
11761 }
11762
11763 /* Output a TOC entry. We derive the entry name from what is being
11764 written. */
11765
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      /* Insert (or find) the entry; 1 => insert if missing.  */
      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */

  /* 128-bit long double: four 32-bit target words k[0..3].  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* Emit as two 64-bit values (pairs of words fused).  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  /* Emit as four 32-bit words.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  /* 64-bit double: two 32-bit target words.  */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  /* 32-bit float: a single target word; zero-padded to 64 bits when
     the TOC slot is a doubleword.  */
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  /* Integer constants (CONST_INT, or a VOIDmode CONST_DOUBLE holding
     a wide integer pair).  */
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the value into low and high host words.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into the high word.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      /* Left-justify a narrow constant within the pointer-sized slot.  */
      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Double-word constant on a 32-bit target: two .longs.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Remaining cases are addresses: SYMBOL_REF, LABEL_REF, CODE_LABEL,
     or a CONST of (symbol/label PLUS constant offset).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* The TC symbol name encodes a nonzero offset as .N<abs> or
	 .P<offset> so distinct offsets get distinct entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12054
12055 /* Output an assembler pseudo-op to write an ASCII string of N characters
12056 starting at P to FILE.
12057
12058 On the RS/6000, we have to do this using the .byte operation and
12059 write out special characters outside the quoted string.
12060 Also, the assembler is broken; very long strings are truncated,
12061 so we must artificially break them up early. */
12062
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Pending text to emit before the next character: the opener for a
     quoted run, the separator/opener for a decimal run, and whatever
     is needed to close the current line.  */
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *close_pending = NULL;
  int quoted_len = 0;   /* characters emitted in the current string */
  int i;
  char c;

  for (i = 0; i < n; i++)
    {
      c = p[i];

      if (c < ' ' || c >= 0177)
	{
	  /* Non-printing character: emit it as a decimal .byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", c);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  close_pending = "\n";
	  quoted_len = 0;
	  continue;
	}

      /* Printing character: emit it inside a quoted string.  */
      if (open_string)
	fputs (open_string, file);
      putc (c, file);

      if (c == '"')
	{
	  /* Write two quotes to get one.  */
	  putc (c, file);
	  ++quoted_len;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      close_pending = "\"\n";
      ++quoted_len;

      /* The assembler truncates very long strings, so break them up
	 artificially well before that limit.  */
      if (quoted_len >= 512)
	{
	  fputs (close_pending, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  close_pending = NULL;
	  quoted_len = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (close_pending)
    fputs (close_pending, file);
}
12124
12125 /* Generate a unique section name for FILENAME for a section type
12126 represented by SECTION_DESC. Output goes into BUF.
12127
12128 SECTION_DESC can be any string, as long as it is different for each
12129 possible section type.
12130
12131 We name the section in the same manner as xlc. The name begins with an
12132 underscore followed by the filename (after stripping any leading directory
12133 names) with the last period replaced by the string SECTION_DESC. If
12134 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12135 the name. */
12136
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the start of the base name and the last period within it.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  /* Any period seen so far belonged to a directory component,
	     not to the base name, so forget it.  Previously a name
	     like "dir.1/foo" caused SECTION_DESC to be dropped.  */
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + base name + SECTION_DESC + terminating NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the alphanumeric characters of the base name, substituting
     SECTION_DESC for the last period.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period in the base name: append SECTION_DESC at the end.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12179
12180 /* Emit profile function. */
12181
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* Plain mcount call: no per-call-site counter word.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the LP<n> counter label to mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      /* ggc_strdup: BUF is stack storage; the SYMBOL_REF needs a
	 permanent copy of the stripped name.  */
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* Call mcount with the caller's return address as argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12226
12227 /* Write function profiler code. */
12228
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the saved link register lives; the V4 ABI
     uses a smaller link area than AIX_NODESC.  */
  int save_lr = 8;

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      /* Save the return address, then load the address of the LP<n>
	 counter label into r0 -- the sequence differs by PIC level.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the label's address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the label's address relative to a point in
	     the code itself.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the absolute address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* _mcount may clobber the static chain register; preserve it
	     around the call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12311
12312 /* Adjust the cost of a scheduling dependency. Return the new cost of
12313 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12314
12315 static int
rs6000_adjust_cost(insn,link,dep_insn,cost)12316 rs6000_adjust_cost (insn, link, dep_insn, cost)
12317 rtx insn;
12318 rtx link;
12319 rtx dep_insn ATTRIBUTE_UNUSED;
12320 int cost;
12321 {
12322 if (! recog_memoized (insn))
12323 return 0;
12324
12325 if (REG_NOTE_KIND (link) != 0)
12326 return 0;
12327
12328 if (REG_NOTE_KIND (link) == 0)
12329 {
12330 /* Data dependency; DEP_INSN writes a register that INSN reads
12331 some cycles later. */
12332 switch (get_attr_type (insn))
12333 {
12334 case TYPE_JMPREG:
12335 /* Tell the first scheduling pass about the latency between
12336 a mtctr and bctr (and mtlr and br/blr). The first
12337 scheduling pass will not know about this latency since
12338 the mtctr instruction, which has the latency associated
12339 to it, will be generated by reload. */
12340 return TARGET_POWER ? 5 : 4;
12341 case TYPE_BRANCH:
12342 /* Leave some extra cycles between a compare and its
12343 dependent branch, to inhibit expensive mispredicts. */
12344 if ((rs6000_cpu_attr == CPU_PPC603
12345 || rs6000_cpu_attr == CPU_PPC604
12346 || rs6000_cpu_attr == CPU_PPC604E
12347 || rs6000_cpu_attr == CPU_PPC620
12348 || rs6000_cpu_attr == CPU_PPC630
12349 || rs6000_cpu_attr == CPU_PPC750
12350 || rs6000_cpu_attr == CPU_PPC7400
12351 || rs6000_cpu_attr == CPU_PPC7450
12352 || rs6000_cpu_attr == CPU_POWER4)
12353 && recog_memoized (dep_insn)
12354 && (INSN_CODE (dep_insn) >= 0)
12355 && (get_attr_type (dep_insn) == TYPE_COMPARE
12356 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12357 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12358 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12359 return cost + 2;
12360 default:
12361 break;
12362 }
12363 /* Fall out to return default cost. */
12364 }
12365
12366 return cost;
12367 }
12368
12369 /* A C statement (sans semicolon) to update the integer scheduling
12370 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12371 INSN earlier, increase the priority to execute INSN later. Do not
12372 define this macro if you do not need to adjust the scheduling
12373 priorities of insns. */
12374
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE: this experimental heuristic is disabled; the hook currently
     returns PRIORITY unchanged.  The code is kept for reference.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12412
12413 /* Return how many instructions the machine can issue per cycle. */
12414
12415 static int
rs6000_issue_rate()12416 rs6000_issue_rate ()
12417 {
12418 switch (rs6000_cpu_attr) {
12419 case CPU_RIOS1: /* ? */
12420 case CPU_RS64A:
12421 case CPU_PPC601: /* ? */
12422 case CPU_PPC7450:
12423 return 3;
12424 case CPU_PPC603:
12425 case CPU_PPC750:
12426 case CPU_PPC7400:
12427 return 2;
12428 case CPU_RIOS2:
12429 case CPU_PPC604:
12430 case CPU_PPC604E:
12431 case CPU_PPC620:
12432 case CPU_PPC630:
12433 case CPU_POWER4:
12434 return 4;
12435 default:
12436 return 1;
12437 }
12438 }
12439
12440
12441 /* Length in units of the trampoline for entering a nested function. */
12442
12443 int
rs6000_trampoline_size()12444 rs6000_trampoline_size ()
12445 {
12446 int ret = 0;
12447
12448 switch (DEFAULT_ABI)
12449 {
12450 default:
12451 abort ();
12452
12453 case ABI_AIX:
12454 ret = (TARGET_32BIT) ? 12 : 24;
12455 break;
12456
12457 case ABI_DARWIN:
12458 case ABI_V4:
12459 case ABI_AIX_NODESC:
12460 ret = (TARGET_32BIT) ? 40 : 48;
12461 break;
12462 }
12463
12464 return ret;
12465 }
12466
12467 /* Emit RTL insns to initialize the variable parts of a trampoline.
12468 FNADDR is an RTX for the address of the function's pure code.
12469 CXT is an RTX for the static chain value for the function. */
12470
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Size in bytes of one pointer slot in a function descriptor.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the entry point and TOC pointer out of the descriptor
	   FNADDR points at, then store those two words plus the static
	   chain into the new descriptor at ADDR.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  /* Call __enable_execute_stack after writing onto the stack to make sure
     the stack address is accessible.  */
#ifdef TRANSFER_FROM_TRAMPOLINE
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__enable_execute_stack"),
		     LCT_NORMAL, VOIDmode, 1, addr, Pmode);
#endif

  return;
}
12526
12527
12528 /* Table of valid machine attributes. */
12529
/* Table of valid machine attributes.  Both entries share one handler,
   which only validates where the attribute may be placed.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL }
};
12537
12538 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12539 struct attribute_spec.handler. */
12540
12541 static tree
rs6000_handle_longcall_attribute(node,name,args,flags,no_add_attrs)12542 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12543 tree *node;
12544 tree name;
12545 tree args ATTRIBUTE_UNUSED;
12546 int flags ATTRIBUTE_UNUSED;
12547 bool *no_add_attrs;
12548 {
12549 if (TREE_CODE (*node) != FUNCTION_TYPE
12550 && TREE_CODE (*node) != FIELD_DECL
12551 && TREE_CODE (*node) != TYPE_DECL)
12552 {
12553 warning ("`%s' attribute only applies to functions",
12554 IDENTIFIER_POINTER (name));
12555 *no_add_attrs = true;
12556 }
12557
12558 return NULL_TREE;
12559 }
12560
12561 /* Set longcall attributes on all functions declared when
12562 rs6000_default_long_calls is true. */
12563 static void
rs6000_set_default_type_attributes(type)12564 rs6000_set_default_type_attributes (type)
12565 tree type;
12566 {
12567 if (rs6000_default_long_calls
12568 && (TREE_CODE (type) == FUNCTION_TYPE
12569 || TREE_CODE (type) == METHOD_TYPE))
12570 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12571 NULL_TREE,
12572 TYPE_ATTRIBUTES (type));
12573 }
12574
12575 /* Return a reference suitable for calling a function with the
12576 longcall attribute. */
12577
12578 struct rtx_def *
rs6000_longcall_ref(call_ref)12579 rs6000_longcall_ref (call_ref)
12580 rtx call_ref;
12581 {
12582 const char *call_name;
12583 tree node;
12584
12585 if (GET_CODE (call_ref) != SYMBOL_REF)
12586 return call_ref;
12587
12588 /* System V adds '.' to the internal name, so skip them. */
12589 call_name = XSTR (call_ref, 0);
12590 if (*call_name == '.')
12591 {
12592 while (*call_name == '.')
12593 call_name++;
12594
12595 node = get_identifier (call_name);
12596 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12597 }
12598
12599 return force_reg (Pmode, call_ref);
12600 }
12601
12602
12603 #ifdef USING_ELFOS_H
12604
12605 /* A C statement or statements to switch to the appropriate section
12606 for output of RTX in mode MODE. You can assume that RTX is some
12607 kind of constant in RTL. The argument MODE is redundant except in
12608 the case of a `const_int' rtx. Select the section by calling
12609 `text_section' or one of the alternatives for other sections.
12610
12611 Do not define this macro if you put all constants in the read-only
12612 data section. */
12613
12614 static void
rs6000_elf_select_rtx_section(mode,x,align)12615 rs6000_elf_select_rtx_section (mode, x, align)
12616 enum machine_mode mode;
12617 rtx x;
12618 unsigned HOST_WIDE_INT align;
12619 {
12620 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12621 toc_section ();
12622 else
12623 default_elf_select_rtx_section (mode, x, align);
12624 }
12625
12626 /* A C statement or statements to switch to the appropriate
12627 section for output of DECL. DECL is either a `VAR_DECL' node
12628 or a constant of some sort. RELOC indicates whether forming
12629 the initial value of DECL requires link-time relocations. */
12630
12631 static void
rs6000_elf_select_section(decl,reloc,align)12632 rs6000_elf_select_section (decl, reloc, align)
12633 tree decl;
12634 int reloc;
12635 unsigned HOST_WIDE_INT align;
12636 {
12637 default_elf_select_section_1 (decl, reloc, align,
12638 flag_pic || DEFAULT_ABI == ABI_AIX);
12639 }
12640
12641 /* A C statement to build up a unique section name, expressed as a
12642 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12643 RELOC indicates whether the initial value of EXP requires
12644 link-time relocations. If you do not define this macro, GCC will use
12645 the symbol name prefixed by `.' as the section name. Note - this
12646 macro can now be called for uninitialized data items as well as
12647 initialized data and functions. */
12648
12649 static void
rs6000_elf_unique_section(decl,reloc)12650 rs6000_elf_unique_section (decl, reloc)
12651 tree decl;
12652 int reloc;
12653 {
12654 default_unique_section_1 (decl, reloc,
12655 flag_pic || DEFAULT_ABI == ABI_AIX);
12656 }
12657
12658
12659 /* If we are referencing a function that is static or is known to be
12660 in this file, make the SYMBOL_REF special. We can use this to indicate
12661 that we can branch to this function without emitting a no-op after the
12662 call. For real AIX calling sequences, we also replace the
12663 function name with the real name (1 or 2 leading .'s), rather than
12664 the function descriptor name. This saves a lot of overriding code
12665 to read the prefixes. */
12666
static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first call for this decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Locally-bound functions can be branched to directly.  */
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): inside this DEFAULT_ABI == ABI_AIX guard the
	     conditional always yields len1 == 1, so the str[1] = '.'
	     below is immediately overwritten by the memcpy; the two-dot
	     branch is dead as written.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      /* Pick up an explicit section name, if the decl has one.  */
      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Mark small-data variables (either small enough for -G, or
	 explicitly placed in one of the small-data sections) by
	 prefixing their symbol name with '@'.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
12741
/* Strip the leading '*' and '@' markers that encode_section_info may
   have prefixed to STR, returning a pointer into the same string.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; str++)
    ;
  return str;
}
12750
/* Return true if DECL should be placed in one of the small-data
   sections.  A variable with an explicit .sdata/.sdata2/.sbss section
   name always qualifies; otherwise anything whose size fits under the
   -G threshold does (subject to the -msdata mode).  */

static bool
rs6000_elf_in_small_data_p (decl)
     tree decl;
{
  /* -msdata=none disables small data entirely.  */
  if (rs6000_sdata == SDATA_NONE)
    return false;

  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      /* In SDATA_DATA mode only public variables go to small data.  */
      if (size > 0
	  && size <= g_switch_value
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}
12778
12779 #endif /* USING_ELFOS_H */
12780
12781
12782 /* Return a REG that occurs in ADDR with coefficient 1.
12783 ADDR can be effectively incremented by incrementing REG.
12784
12785 r0 is special and we must not select it as an address
12786 register by this routine since our caller will try to
12787 increment the returned register via an "la" instruction. */
12788
12789 struct rtx_def *
find_addr_reg(addr)12790 find_addr_reg (addr)
12791 rtx addr;
12792 {
12793 while (GET_CODE (addr) == PLUS)
12794 {
12795 if (GET_CODE (XEXP (addr, 0)) == REG
12796 && REGNO (XEXP (addr, 0)) != 0)
12797 addr = XEXP (addr, 0);
12798 else if (GET_CODE (XEXP (addr, 1)) == REG
12799 && REGNO (XEXP (addr, 1)) != 0)
12800 addr = XEXP (addr, 1);
12801 else if (CONSTANT_P (XEXP (addr, 0)))
12802 addr = XEXP (addr, 1);
12803 else if (CONSTANT_P (XEXP (addr, 1)))
12804 addr = XEXP (addr, 0);
12805 else
12806 abort ();
12807 }
12808 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12809 return addr;
12810 abort ();
12811 }
12812
/* Report a fatal error for the ill-formed address OP, quoting the
   offending insn.  Does not return.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12819
12820 /* Called to register all of our global variables with the garbage
12821 collector. */
12822
/* Called to register all of our global variables with the garbage
   collector.  */

static void
rs6000_add_gc_roots ()
{
  /* Create the TOC entry hash table and keep the GC from reclaiming
     it (toc_hash_mark_table marks its contents live).  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);
}
12830
12831 #if TARGET_MACHO
12832
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): '&&' binds tighter than '||', so this parses as
	 SYMBOL_REF || ((SYMBOL_REF || LABEL_REF) && CONST_INT).  Also
	 XEXP (op, 0) is read without checking that OP is a PLUS.
	 Verify both points before ever re-enabling this dead code.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
#endif
12857
12858 #ifdef RS6000_LONG_BRANCH
12859
12860 static tree stub_list = 0;
12861
12862 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12863 procedure calls to the linked list. */
12864
/* ADD_COMPILER_STUB adds the compiler generated stub for handling
   procedure calls to the linked list.  */

void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* Each entry is a TREE_LIST: PURPOSE = function, VALUE = label,
     and the source line stashed in TREE_TYPE as an INTEGER_CST.  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
12876
/* Accessors for a stub_list entry: the stub's label, the function it
   forwards to, and the source line number of the recorded call.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
12880
12881 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12882 handling procedure calls from the linked list and initializes the
12883 linked list. */
12884
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only emitted for non-PIC code; the list is cleared
     either way.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	/* Emit a .stabd line-number note before the stub body.  */
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means "already assembler-ready"; otherwise the
	   user-level name gets the usual '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 and branch through CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  stub_list = 0;
}
12928
12929 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12930 already there or not. */
12931
12932 int
no_previous_def(function_name)12933 no_previous_def (function_name)
12934 tree function_name;
12935 {
12936 tree stub;
12937 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12938 if (function_name == STUB_FUNCTION_NAME (stub))
12939 return 0;
12940 return 1;
12941 }
12942
12943 /* GET_PREV_LABEL gets the label name from the previous definition of
12944 the function. */
12945
12946 tree
get_prev_label(function_name)12947 get_prev_label (function_name)
12948 tree function_name;
12949 {
12950 tree stub;
12951 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12952 if (function_name == STUB_FUNCTION_NAME (stub))
12953 return STUB_LABEL_NAME (stub);
12954 return 0;
12955 }
12956
12957 /* INSN is either a function call or a millicode call. It may have an
12958 unconditional jump in its delay slot.
12959
12960 CALL_DEST is the routine we are calling. */
12961
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  Returns the assembler
   template (in a static buffer) for the call: a "jbsr" through a
   long-branch stub when one is needed, otherwise a plain "bl".  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this target: create a stub label and record
	     the stub for later emission by output_compiler_stub.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the nearest NOTE to recover a line number for
	     the stub's debug info.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s caps the label so the template fits in buf.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13001
13002 #endif /* RS6000_LONG_BRANCH */
13003
/* Write "L<N>$<symbol>" into BUF, keeping the symbol's existing
   leading quote or adding surrounding quotes when the name needs
   them.  LENGTH is accepted for symmetry with the other GEN_* macros
   but is not used here.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13021
13022
13023 /* Generate PIC and indirect symbol stubs. */
13024
/* Generate PIC and indirect symbol stubs.  FILE receives the
   assembly; SYMB is the target symbol, STUB the stub's own name.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  /* NOTE(review): LABEL is incremented per stub but the generated
     local label below always uses index 0 via GEN_LOCAL_LABEL -- the
     counter appears unused; confirm before relying on it.  */
  label += 1;

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PC-relative stub: materialize the address of the lazy pointer
	 via bcl/mflr, load the helper address, and branch through CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself, initially bound to the dyld helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13081
13082 /* Legitimize PIC addresses. If the address is already
13083 position-independent, we return ORIG. Newly generated
13084 position-independent addresses go into a reg. This is REG if non
13085 zero, otherwise we allocate register(s) as necessary. */
13086
13087 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13088
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register unless we must not create new
     pseudos (during/after reload).  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already PIC-relative: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize the two halves of a (const (plus ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* Offset too big and no pseudos available: spill the
		 whole constant to memory instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13136
13137 /* This is just a placeholder to make linking work without having to
13138 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13139 ever needed for Darwin (not too likely!) this would have to get a
13140 real definition. */
13141
void
toc_section ()
{
  /* Deliberately empty: only present so Darwin builds link; see the
     comment preceding this function.  */
}
13146
13147 #endif /* TARGET_MACHO */
13148
13149 #if TARGET_ELF
13150 static unsigned int
rs6000_elf_section_type_flags(decl,name,reloc)13151 rs6000_elf_section_type_flags (decl, name, reloc)
13152 tree decl;
13153 const char *name;
13154 int reloc;
13155 {
13156 return default_section_type_flags_1 (decl, name, reloc,
13157 flag_pic || DEFAULT_ABI == ABI_AIX);
13158 }
13159
13160 /* Record an element in the table of global constructors. SYMBOL is
13161 a SYMBOL_REF of the function to be called; PRIORITY is a number
13162 between 0 and MAX_INIT_PRIORITY.
13163
13164 This differs from default_named_section_asm_out_constructor in
13165 that we have special handling for -mrelocatable. */
13166
13167 static void
rs6000_elf_asm_out_constructor(symbol,priority)13168 rs6000_elf_asm_out_constructor (symbol, priority)
13169 rtx symbol;
13170 int priority;
13171 {
13172 const char *section = ".ctors";
13173 char buf[16];
13174
13175 if (priority != DEFAULT_INIT_PRIORITY)
13176 {
13177 sprintf (buf, ".ctors.%.5u",
13178 /* Invert the numbering so the linker puts us in the proper
13179 order; constructors are run from right to left, and the
13180 linker sorts in increasing order. */
13181 MAX_INIT_PRIORITY - priority);
13182 section = buf;
13183 }
13184
13185 named_section_flags (section, SECTION_WRITE);
13186 assemble_align (POINTER_SIZE);
13187
13188 if (TARGET_RELOCATABLE)
13189 {
13190 fputs ("\t.long (", asm_out_file);
13191 output_addr_const (asm_out_file, symbol);
13192 fputs (")@fixup\n", asm_out_file);
13193 }
13194 else
13195 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13196 }
13197
13198 static void
rs6000_elf_asm_out_destructor(symbol,priority)13199 rs6000_elf_asm_out_destructor (symbol, priority)
13200 rtx symbol;
13201 int priority;
13202 {
13203 const char *section = ".dtors";
13204 char buf[16];
13205
13206 if (priority != DEFAULT_INIT_PRIORITY)
13207 {
13208 sprintf (buf, ".dtors.%.5u",
13209 /* Invert the numbering so the linker puts us in the proper
13210 order; constructors are run from right to left, and the
13211 linker sorts in increasing order. */
13212 MAX_INIT_PRIORITY - priority);
13213 section = buf;
13214 }
13215
13216 named_section_flags (section, SECTION_WRITE);
13217 assemble_align (POINTER_SIZE);
13218
13219 if (TARGET_RELOCATABLE)
13220 {
13221 fputs ("\t.long (", asm_out_file);
13222 output_addr_const (asm_out_file, symbol);
13223 fputs (")@fixup\n", asm_out_file);
13224 }
13225 else
13226 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13227 }
13228 #endif
13229
13230 #if TARGET_XCOFF
13231 static void
rs6000_xcoff_asm_globalize_label(stream,name)13232 rs6000_xcoff_asm_globalize_label (stream, name)
13233 FILE *stream;
13234 const char *name;
13235 {
13236 fputs (GLOBAL_ASM_OP, stream);
13237 RS6000_OUTPUT_BASENAME (stream, name);
13238 putc ('\n', stream);
13239 }
13240
13241 static void
rs6000_xcoff_asm_named_section(name,flags)13242 rs6000_xcoff_asm_named_section (name, flags)
13243 const char *name;
13244 unsigned int flags;
13245 {
13246 int smclass;
13247 static const char * const suffix[3] = { "PR", "RO", "RW" };
13248
13249 if (flags & SECTION_CODE)
13250 smclass = 0;
13251 else if (flags & SECTION_WRITE)
13252 smclass = 2;
13253 else
13254 smclass = 1;
13255
13256 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13257 (flags & SECTION_CODE) ? "." : "",
13258 name, suffix[smclass], flags & SECTION_ENTSIZE);
13259 }
13260
13261 static void
rs6000_xcoff_select_section(decl,reloc,align)13262 rs6000_xcoff_select_section (decl, reloc, align)
13263 tree decl;
13264 int reloc;
13265 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13266 {
13267 if (decl_readonly_section_1 (decl, reloc, 1))
13268 {
13269 if (TREE_PUBLIC (decl))
13270 read_only_data_section ();
13271 else
13272 read_only_private_data_section ();
13273 }
13274 else
13275 {
13276 if (TREE_PUBLIC (decl))
13277 data_section ();
13278 else
13279 private_data_section ();
13280 }
13281 }
13282
/* Give DECL a unique section named after its own symbol, but only for
   public, initialized data; everything else is left for the normal
   select_section logic.  */

static void
rs6000_xcoff_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  /* Name the section after the (encoding-stripped) symbol itself.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
13303
13304 /* Select section for constant in constant pool.
13305
13306 On RS/6000, all constants are in the private read-only data area.
13307 However, if this is being placed in the TOC it must be output as a
13308 toc entry. */
13309
13310 static void
rs6000_xcoff_select_rtx_section(mode,x,align)13311 rs6000_xcoff_select_rtx_section (mode, x, align)
13312 enum machine_mode mode;
13313 rtx x;
13314 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13315 {
13316 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13317 toc_section ();
13318 else
13319 read_only_private_data_section ();
13320 }
13321
13322 /* Remove any trailing [DS] or the like from the symbol name. */
13323
/* Remove any trailing [DS] or the like from the symbol name NAME,
   and skip a leading '*' marker.  Returns NAME itself (advanced past
   the '*') when nothing is stripped, or a fresh GC string otherwise.

   Fix: guard against names shorter than the 4-character "[XX]"
   suffix -- the old code read name[len - 1] on an empty string and
   computed len - 4 (size_t underflow) on names like "]".  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* A mapping-class suffix is at least "[X]"-sized; require the full
     four characters before stripping.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13337
13338 /* Section attributes. AIX is always PIC. */
13339
/* Section attributes.  AIX is always PIC.  Returns the generic flags
   with the section's alignment exponent packed into the ENTSIZE
   bits (consumed by rs6000_xcoff_asm_named_section).  */

static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL_ALIGN (decl) is evaluated here without a
       null check -- verify this hook is never reached with a null
       DECL for data sections.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
13360
13361 #endif /* TARGET_XCOFF */
13362
13363 /* Note that this is also used for PPC64 Linux. */
13364
13365 static void
rs6000_xcoff_encode_section_info(decl,first)13366 rs6000_xcoff_encode_section_info (decl, first)
13367 tree decl;
13368 int first ATTRIBUTE_UNUSED;
13369 {
13370 if (TREE_CODE (decl) == FUNCTION_DECL
13371 && (*targetm.binds_local_p) (decl))
13372 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13373 }
13374
13375 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13376 PIC, use private copy of flag_pic. */
13377
/* Cross-module name binding.  For AIX and PPC64 Linux, which always are
   PIC, use private copy of flag_pic.  */

static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* rs6000_flag_pic preserves the PIC setting for ABIs where flag_pic
     itself may have been cleared.  */
  return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
}
13384
13385 /* A C expression returning the cost of moving data from a register of class
13386 CLASS1 to one of CLASS2. */
13387
/* A C expression returning the cost of moving data from a register of class
   CLASS1 to one of CLASS2.  */

int
rs6000_register_move_cost (mode, from, to)
     enum machine_mode mode;
     enum reg_class from, to;
{
  /* Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      /* Normalize so FROM is the non-GPR side of the move.  */
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      /* FP/Altivec <-> GPR moves go through memory: cost is one store
	 plus one load.  */
      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
      else if (from == CR_REGS)
	return 4;

      else
	/* A move will cost one instruction per GPR moved.  */
	return 2 * HARD_REGNO_NREGS (0, mode);
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
13422
13423 /* A C expressions returning the cost of moving data of MODE from a register to
13424 or from memory. */
13425
/* A C expressions returning the cost of moving data of MODE from a register to
   or from memory.  */

int
rs6000_memory_move_cost (mode, class, in)
     enum machine_mode mode;
     enum reg_class class;
     int in ATTRIBUTE_UNUSED;
{
  /* The hard register numbers passed to HARD_REGNO_NREGS below stand
     for a representative register of each class -- presumably 0 is a
     GPR and 32 the first FPR on this target; only the per-register
     width matters here.  */
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * HARD_REGNO_NREGS (0, mode);
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * HARD_REGNO_NREGS (32, mode);
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
  else
    /* Other classes reach memory via GENERAL_REGS.  */
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}
13441
13442