1 /* Copyright (C) 1997-2020 Free Software Foundation, Inc.
2 Contributed by Red Hat, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "cfgrtl.h"
47 #include "langhooks.h"
48 #include "dumpfile.h"
49 #include "builtins.h"
50 #include "ifcvt.h"
51 #include "rtl-iter.h"
52 #include "calls.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
57 #ifndef FRV_INLINE
58 #define FRV_INLINE inline
59 #endif
60
61 /* The maximum number of distinct NOP patterns. There are three:
62 nop, fnop and mnop. */
63 #define NUM_NOP_PATTERNS 3
64
65 /* Classification of instructions and units: integer, floating-point/media,
66 branch and control. */
67 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
68
69 /* The DFA names of the units, in packet order. */
70 static const char *const frv_unit_names[] =
71 {
72 "c",
73 "i0", "f0",
74 "i1", "f1",
75 "i2", "f2",
76 "i3", "f3",
77 "b0", "b1"
78 };
79
80 /* The classification of each unit in frv_unit_names[]. */
81 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
82 {
83 GROUP_C,
84 GROUP_I, GROUP_FM,
85 GROUP_I, GROUP_FM,
86 GROUP_I, GROUP_FM,
87 GROUP_I, GROUP_FM,
88 GROUP_B, GROUP_B
89 };
90
91 /* Return the DFA unit code associated with the Nth unit of integer
92    or floating-point group GROUP.  */
93 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
94
95 /* Return the number of integer or floating-point unit UNIT
96 (1 for I1, 2 for F2, etc.). */
97 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
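/* A purely illustrative walk-through of the two macros above: with the unit
   order in frv_unit_names[], NTH_UNIT (GROUP_I, 1) expands to
   frv_unit_codes[0 + 1 * 2 + 1], the DFA code for "i1", and
   NTH_UNIT (GROUP_FM, 2) expands to frv_unit_codes[1 + 2 * 2 + 1], i.e. "f2".
   Going the other way, UNIT_NUMBER on those frv_unit_names[] indices gives
   (3 - 1) / 2 == 1 and (6 - 1) / 2 == 2 respectively.  */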
98
99 /* The DFA unit number for each unit in frv_unit_names[]. */
100 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
101
102 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
103 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
104 no instruction of type T has been seen. */
105 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
106
107 /* An array of dummy nop INSNs, one for each type of nop that the
108 target supports. */
109 static GTY(()) rtx_insn *frv_nops[NUM_NOP_PATTERNS];
110
111 /* The number of nop instructions in frv_nops[]. */
112 static unsigned int frv_num_nops;
113
114 /* The type of access. FRV_IO_UNKNOWN means the access can be either
115 a read or a write. */
116 enum frv_io_type { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE };
117
118 /* Information about one __builtin_read or __builtin_write access, or
119 the combination of several such accesses. The most general value
120 is all-zeros (an unknown access to an unknown address). */
121 struct frv_io {
122 enum frv_io_type type;
123
124 /* The constant address being accessed, or zero if not known. */
125 HOST_WIDE_INT const_address;
126
127 /* The run-time address, as used in operand 0 of the membar pattern. */
128 rtx var_address;
129 };
130
131 /* Return true if instruction INSN should be packed with the following
132 instruction. */
133 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
134
135 /* Set the value of PACKING_FLAG_P(INSN). */
136 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
137 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
138
139 /* Loop with REG set to each hard register in rtx X. */
140 #define FOR_EACH_REGNO(REG, X) \
141 for (REG = REGNO (X); REG < END_REGNO (X); REG++)
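/* A minimal usage sketch (hypothetical caller, not part of this file):

     unsigned int regno;
     FOR_EACH_REGNO (regno, operand)
       SET_HARD_REG_BIT (live, regno);

   This visits every hard register from REGNO (operand) up to, but not
   including, END_REGNO (operand), so a multi-word register is handled one
   hard register at a time.  */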
142
143 /* This structure contains machine specific function data. */
144 struct GTY(()) machine_function
145 {
146 /* True if we have created an rtx that relies on the stack frame. */
147 int frame_needed;
148
149 /* True if this function contains at least one __builtin_{read,write}*. */
150 bool has_membar_p;
151 };
152
153 /* Temporary register allocation support structure. */
154 typedef struct frv_tmp_reg_struct
155 {
156 HARD_REG_SET regs; /* possible registers to allocate */
157 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
158 }
159 frv_tmp_reg_t;
160
161 /* Register state information for VLIW re-packing phase. */
162 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
163 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
164 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
165 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
166
167 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
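/* So, for example, a regstate_t value of (2 | REGSTATE_MODIFIED
   | REGSTATE_IF_TRUE) would describe a register written in the current VLIW
   insn on the "true" side of a conditional execution controlled by CC2
   (assuming the low REGSTATE_CC_MASK bits simply hold the CC number).  */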
168
169 typedef unsigned char regstate_t;
170
171 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
172 memory move. */
173 enum frv_stack_op
174 {
175 FRV_LOAD,
176 FRV_STORE
177 };
178
179 /* Information required by frv_frame_access. */
180 typedef struct
181 {
182 /* This field is FRV_LOAD if registers are to be loaded from the stack and
183 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
184 the move is being done by the prologue code while FRV_LOAD implies it
185 is being done by the epilogue. */
186 enum frv_stack_op op;
187
188 /* The base register to use when accessing the stack. This may be the
189 frame pointer, stack pointer, or a temporary. The choice of register
190 depends on which part of the frame is being accessed and how big the
191 frame is. */
192 rtx base;
193
194 /* The offset of BASE from the bottom of the current frame, in bytes. */
195 int base_offset;
196 } frv_frame_accessor_t;
197
198 /* Conditional execution support gathered together in one structure. */
199 typedef struct
200 {
201 /* Linked list of insns to add if the conditional execution conversion was
202 successful. Each link points to an EXPR_LIST which points to the pattern
203 of the insn to add, and the insn to be inserted before. */
204 rtx added_insns_list;
205
206 /* Identify which registers are safe to allocate for if conversions to
207 conditional execution. We keep the last allocated register in the
208 register classes between COND_EXEC statements. This will mean we allocate
209 different registers for each different COND_EXEC group if we can. This
210 might allow the scheduler to intermix two different COND_EXEC sections. */
211 frv_tmp_reg_t tmp_reg;
212
213   /* For nested IFs, identify which CC registers are used outside of being set
214      via a compare insn and used via a check insn.  This will allow us to
215 know if we can rewrite the register to use a different register that will
216 be paired with the CR register controlling the nested IF-THEN blocks. */
217 HARD_REG_SET nested_cc_ok_rewrite;
218
219 /* Temporary registers allocated to hold constants during conditional
220 execution. */
221 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
222
223 /* Current number of temp registers available. */
224 int cur_scratch_regs;
225
226 /* Number of nested conditional execution blocks. */
227 int num_nested_cond_exec;
228
229 /* Map of insns that set up constants in scratch registers. */
230 bitmap scratch_insns_bitmap;
231
232 /* Conditional execution test register (CC0..CC7). */
233 rtx cr_reg;
234
235 /* Conditional execution compare register that is paired with cr_reg, so that
236 nested compares can be done. The csubcc and caddcc instructions don't
237 have enough bits to specify both a CC register to be set and a CR register
238 to do the test on, so the same bit number is used for both. Needless to
239 say, this is rather inconvenient for GCC. */
240 rtx nested_cc_reg;
241
242 /* Extra CR registers used for &&, ||. */
243 rtx extra_int_cr;
244 rtx extra_fp_cr;
245
246 /* Previous CR used in nested if, to make sure we are dealing with the same
247 nested if as the previous statement. */
248 rtx last_nested_if_cr;
249 }
250 frv_ifcvt_t;
251
252 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
253
254 /* Map register number to smallest register class. */
255 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
256
257 /* Cached value of frv_stack_info. */
258 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
259
260 /* Forward references */
261
262 static void frv_option_override (void);
263 static bool frv_legitimate_address_p (machine_mode, rtx, bool);
264 static int frv_default_flags_for_cpu (void);
265 static int frv_string_begins_with (const char *, const char *);
266 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
267 static void frv_print_operand (FILE *, rtx, int);
268 static void frv_print_operand_address (FILE *, machine_mode, rtx);
269 static bool frv_print_operand_punct_valid_p (unsigned char code);
270 static void frv_print_operand_memory_reference_reg
271 (FILE *, rtx);
272 static void frv_print_operand_memory_reference (FILE *, rtx, int);
273 static int frv_print_operand_jump_hint (rtx_insn *);
274 static const char *comparison_string (enum rtx_code, rtx);
275 static rtx frv_function_value (const_tree, const_tree,
276 bool);
277 static rtx frv_libcall_value (machine_mode,
278 const_rtx);
279 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
280 static rtx single_set_pattern (rtx);
281 static int frv_function_contains_far_jump (void);
282 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
283 enum reg_class,
284 machine_mode,
285 int, int);
286 static rtx frv_frame_offset_rtx (int);
287 static rtx frv_frame_mem (machine_mode, rtx, int);
288 static rtx frv_dwarf_store (rtx, int);
289 static void frv_frame_insn (rtx, rtx);
290 static void frv_frame_access (frv_frame_accessor_t*,
291 rtx, int);
292 static void frv_frame_access_multi (frv_frame_accessor_t*,
293 frv_stack_t *, int);
294 static void frv_frame_access_standard_regs (enum frv_stack_op,
295 frv_stack_t *);
296 static struct machine_function *frv_init_machine_status (void);
297 static rtx frv_int_to_acc (enum insn_code, int, rtx);
298 static machine_mode frv_matching_accg_mode (machine_mode);
299 static rtx frv_read_argument (tree, unsigned int);
300 static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
301 static int frv_check_constant_argument (enum insn_code, int, rtx);
302 static rtx frv_legitimize_target (enum insn_code, rtx);
303 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
304 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
305 static rtx frv_legitimize_address (rtx, rtx, machine_mode);
306 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
307 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
308 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
309 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
310 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
311 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
312 static rtx frv_expand_int_void2arg (enum insn_code, tree);
313 static rtx frv_expand_prefetches (enum insn_code, tree);
314 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
315 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
316 static rtx frv_expand_mclracc_builtin (tree);
317 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
318 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
319 static rtx frv_expand_noargs_builtin (enum insn_code);
320 static void frv_split_iacc_move (rtx, rtx);
321 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
322 static void frv_ifcvt_add_insn (rtx, rtx_insn *, int);
323 static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
324 static rtx frv_ifcvt_load_value (rtx, rtx);
325 static unsigned int frv_insn_unit (rtx_insn *);
326 static bool frv_issues_to_branch_unit_p (rtx_insn *);
327 static int frv_cond_flags (rtx);
328 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
329 static bool frv_registers_conflict_p (rtx);
330 static void frv_registers_update_1 (rtx, const_rtx, void *);
331 static void frv_registers_update (rtx);
332 static void frv_start_packet (void);
333 static void frv_start_packet_block (void);
334 static void frv_finish_packet (void (*) (void));
335 static bool frv_pack_insn_p (rtx_insn *);
336 static void frv_add_insn_to_packet (rtx_insn *);
337 static void frv_insert_nop_in_packet (rtx_insn *);
338 static bool frv_for_each_packet (void (*) (void));
339 static bool frv_sort_insn_group_1 (enum frv_insn_group,
340 unsigned int, unsigned int,
341 unsigned int, unsigned int,
342 state_t);
343 static int frv_compare_insns (const void *, const void *);
344 static void frv_sort_insn_group (enum frv_insn_group);
345 static void frv_reorder_packet (void);
346 static void frv_fill_unused_units (enum frv_insn_group);
347 static void frv_align_label (void);
348 static void frv_reorg_packet (void);
349 static void frv_register_nop (rtx);
350 static void frv_reorg (void);
351 static void frv_pack_insns (void);
352 static void frv_function_prologue (FILE *);
353 static void frv_function_epilogue (FILE *);
354 static bool frv_assemble_integer (rtx, unsigned, int);
355 static void frv_init_builtins (void);
356 static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
357 static void frv_init_libfuncs (void);
358 static bool frv_in_small_data_p (const_tree);
359 static void frv_asm_output_mi_thunk
360 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
361 static void frv_setup_incoming_varargs (cumulative_args_t,
362 const function_arg_info &,
363 int *, int);
364 static rtx frv_expand_builtin_saveregs (void);
365 static void frv_expand_builtin_va_start (tree, rtx);
366 static bool frv_rtx_costs (rtx, machine_mode, int, int,
367 int*, bool);
368 static int frv_register_move_cost (machine_mode,
369 reg_class_t, reg_class_t);
370 static int frv_memory_move_cost (machine_mode,
371 reg_class_t, bool);
372 static void frv_asm_out_constructor (rtx, int);
373 static void frv_asm_out_destructor (rtx, int);
374 static bool frv_function_symbol_referenced_p (rtx);
375 static bool frv_legitimate_constant_p (machine_mode, rtx);
376 static bool frv_cannot_force_const_mem (machine_mode, rtx);
377 static const char *unspec_got_name (int);
378 static void frv_output_const_unspec (FILE *,
379 const struct frv_unspec *);
380 static bool frv_function_ok_for_sibcall (tree, tree);
381 static rtx frv_struct_value_rtx (tree, int);
382 static bool frv_must_pass_in_stack (const function_arg_info &);
383 static int frv_arg_partial_bytes (cumulative_args_t,
384 const function_arg_info &);
385 static rtx frv_function_arg (cumulative_args_t, const function_arg_info &);
386 static rtx frv_function_incoming_arg (cumulative_args_t,
387 const function_arg_info &);
388 static void frv_function_arg_advance (cumulative_args_t,
389 const function_arg_info &);
390 static unsigned int frv_function_arg_boundary (machine_mode,
391 const_tree);
392 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
393 ATTRIBUTE_UNUSED;
394 static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
395 machine_mode,
396 secondary_reload_info *);
397 static bool frv_frame_pointer_required (void);
398 static bool frv_can_eliminate (const int, const int);
399 static void frv_conditional_register_usage (void);
400 static void frv_trampoline_init (rtx, tree, rtx);
401 static bool frv_class_likely_spilled_p (reg_class_t);
402 static unsigned int frv_hard_regno_nregs (unsigned int, machine_mode);
403 static bool frv_hard_regno_mode_ok (unsigned int, machine_mode);
404 static bool frv_modes_tieable_p (machine_mode, machine_mode);
405
406 /* Initialize the GCC target structure. */
407 #undef TARGET_PRINT_OPERAND
408 #define TARGET_PRINT_OPERAND frv_print_operand
409 #undef TARGET_PRINT_OPERAND_ADDRESS
410 #define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
411 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
412 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
413 #undef TARGET_ASM_FUNCTION_PROLOGUE
414 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
415 #undef TARGET_ASM_FUNCTION_EPILOGUE
416 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
417 #undef TARGET_ASM_INTEGER
418 #define TARGET_ASM_INTEGER frv_assemble_integer
419 #undef TARGET_OPTION_OVERRIDE
420 #define TARGET_OPTION_OVERRIDE frv_option_override
421 #undef TARGET_INIT_BUILTINS
422 #define TARGET_INIT_BUILTINS frv_init_builtins
423 #undef TARGET_EXPAND_BUILTIN
424 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
425 #undef TARGET_INIT_LIBFUNCS
426 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
427 #undef TARGET_IN_SMALL_DATA_P
428 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
429 #undef TARGET_REGISTER_MOVE_COST
430 #define TARGET_REGISTER_MOVE_COST frv_register_move_cost
431 #undef TARGET_MEMORY_MOVE_COST
432 #define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
433 #undef TARGET_RTX_COSTS
434 #define TARGET_RTX_COSTS frv_rtx_costs
435 #undef TARGET_ASM_CONSTRUCTOR
436 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
437 #undef TARGET_ASM_DESTRUCTOR
438 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
439
440 #undef TARGET_ASM_OUTPUT_MI_THUNK
441 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
442 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
443 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
444
445 #undef TARGET_SCHED_ISSUE_RATE
446 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
447
448 #undef TARGET_LEGITIMIZE_ADDRESS
449 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
450
451 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
452 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
453 #undef TARGET_LEGITIMATE_CONSTANT_P
454 #define TARGET_LEGITIMATE_CONSTANT_P frv_legitimate_constant_p
455 #undef TARGET_CANNOT_FORCE_CONST_MEM
456 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
457
458 #undef TARGET_HAVE_TLS
459 #define TARGET_HAVE_TLS HAVE_AS_TLS
460
461 #undef TARGET_STRUCT_VALUE_RTX
462 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
463 #undef TARGET_MUST_PASS_IN_STACK
464 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
465 #undef TARGET_PASS_BY_REFERENCE
466 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
467 #undef TARGET_ARG_PARTIAL_BYTES
468 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
469 #undef TARGET_FUNCTION_ARG
470 #define TARGET_FUNCTION_ARG frv_function_arg
471 #undef TARGET_FUNCTION_INCOMING_ARG
472 #define TARGET_FUNCTION_INCOMING_ARG frv_function_incoming_arg
473 #undef TARGET_FUNCTION_ARG_ADVANCE
474 #define TARGET_FUNCTION_ARG_ADVANCE frv_function_arg_advance
475 #undef TARGET_FUNCTION_ARG_BOUNDARY
476 #define TARGET_FUNCTION_ARG_BOUNDARY frv_function_arg_boundary
477
478 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
479 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
480 #undef TARGET_SETUP_INCOMING_VARARGS
481 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
482 #undef TARGET_MACHINE_DEPENDENT_REORG
483 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
484
485 #undef TARGET_EXPAND_BUILTIN_VA_START
486 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
487
488 #if HAVE_AS_TLS
489 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
490 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
491 #endif
492
493 #undef TARGET_CLASS_LIKELY_SPILLED_P
494 #define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p
495
496 #undef TARGET_SECONDARY_RELOAD
497 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
498
499 #undef TARGET_LRA_P
500 #define TARGET_LRA_P hook_bool_void_false
501
502 #undef TARGET_LEGITIMATE_ADDRESS_P
503 #define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p
504
505 #undef TARGET_FRAME_POINTER_REQUIRED
506 #define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required
507
508 #undef TARGET_CAN_ELIMINATE
509 #define TARGET_CAN_ELIMINATE frv_can_eliminate
510
511 #undef TARGET_CONDITIONAL_REGISTER_USAGE
512 #define TARGET_CONDITIONAL_REGISTER_USAGE frv_conditional_register_usage
513
514 #undef TARGET_TRAMPOLINE_INIT
515 #define TARGET_TRAMPOLINE_INIT frv_trampoline_init
516
517 #undef TARGET_FUNCTION_VALUE
518 #define TARGET_FUNCTION_VALUE frv_function_value
519 #undef TARGET_LIBCALL_VALUE
520 #define TARGET_LIBCALL_VALUE frv_libcall_value
521
522 #undef TARGET_HARD_REGNO_NREGS
523 #define TARGET_HARD_REGNO_NREGS frv_hard_regno_nregs
524 #undef TARGET_HARD_REGNO_MODE_OK
525 #define TARGET_HARD_REGNO_MODE_OK frv_hard_regno_mode_ok
526 #undef TARGET_MODES_TIEABLE_P
527 #define TARGET_MODES_TIEABLE_P frv_modes_tieable_p
528 #undef TARGET_CONSTANT_ALIGNMENT
529 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
530
531 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
532 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
533
534 struct gcc_target targetm = TARGET_INITIALIZER;
535
536 #define FRV_SYMBOL_REF_TLS_P(RTX) \
537 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
538
539
540 /* Any function call that satisfies the machine-independent
541 requirements is eligible on FR-V. */
542
543 static bool
544 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
545 tree exp ATTRIBUTE_UNUSED)
546 {
547 return true;
548 }
549
550 /* Return true if SYMBOL is a small data symbol and relocation RELOC
551 can be used to access it directly in a load or store. */
552
553 static FRV_INLINE bool
554 frv_small_data_reloc_p (rtx symbol, int reloc)
555 {
556 return (GET_CODE (symbol) == SYMBOL_REF
557 && SYMBOL_REF_SMALL_P (symbol)
558 && (!TARGET_FDPIC || flag_pic == 1)
559 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
560 }
561
562 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
563 appropriately. */
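/* Schematically (an informal sketch of what the code below accepts), X has
   the form:

     (const (plus (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT)
                  (const_int OFFSET)))

   where the PLUS and its constant offset may be absent.  */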
564
565 bool
566 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
567 {
568 if (GET_CODE (x) == CONST)
569 {
570 unspec->offset = 0;
571 x = XEXP (x, 0);
572 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
573 {
574 unspec->offset += INTVAL (XEXP (x, 1));
575 x = XEXP (x, 0);
576 }
577 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
578 {
579 unspec->symbol = XVECEXP (x, 0, 0);
580 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
581
582 if (unspec->offset == 0)
583 return true;
584
585 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
586 && unspec->offset > 0
587 && unspec->offset < g_switch_value)
588 return true;
589 }
590 }
591 return false;
592 }
593
594 /* Decide whether we can force certain constants to memory. If we
595 decide we can't, the caller should be able to cope with it in
596 another way.
597
598 We never allow constants to be forced into memory for TARGET_FDPIC.
599 This is necessary for several reasons:
600
601 1. Since frv_legitimate_constant_p rejects constant pool addresses, the
602 target-independent code will try to force them into the constant
603 pool, thus leading to infinite recursion.
604
605 2. We can never introduce new constant pool references during reload.
606 Any such reference would require use of the pseudo FDPIC register.
607
608 3. We can't represent a constant added to a function pointer (which is
609 not the same as a pointer to a function+constant).
610
611 4. In many cases, it's more efficient to calculate the constant in-line. */
612
613 static bool
614 frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
615 rtx x ATTRIBUTE_UNUSED)
616 {
617 return TARGET_FDPIC;
618 }
619
620 static int
621 frv_default_flags_for_cpu (void)
622 {
623 switch (frv_cpu_type)
624 {
625 case FRV_CPU_GENERIC:
626 return MASK_DEFAULT_FRV;
627
628 case FRV_CPU_FR550:
629 return MASK_DEFAULT_FR550;
630
631 case FRV_CPU_FR500:
632 case FRV_CPU_TOMCAT:
633 return MASK_DEFAULT_FR500;
634
635 case FRV_CPU_FR450:
636 return MASK_DEFAULT_FR450;
637
638 case FRV_CPU_FR405:
639 case FRV_CPU_FR400:
640 return MASK_DEFAULT_FR400;
641
642 case FRV_CPU_FR300:
643 case FRV_CPU_SIMPLE:
644 return MASK_DEFAULT_SIMPLE;
645
646 default:
647 gcc_unreachable ();
648 }
649 }
650
651 /* Implement TARGET_OPTION_OVERRIDE. */
652
653 static void
654 frv_option_override (void)
655 {
656 int regno;
657 unsigned int i;
658
659 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
660
661 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
662 linker about linking pic and non-pic code. */
663 if (TARGET_LIBPIC)
664 {
665 if (!flag_pic) /* -fPIC */
666 flag_pic = 2;
667
668 if (!global_options_set.x_g_switch_value) /* -G0 */
669 {
670 g_switch_value = 0;
671 }
672 }
673
674 /* A C expression whose value is a register class containing hard
675 register REGNO. In general there is more than one such class;
676 choose a class which is "minimal", meaning that no smaller class
677 also contains the register. */
678
679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
680 {
681 enum reg_class rclass;
682
683 if (GPR_P (regno))
684 {
685 int gpr_reg = regno - GPR_FIRST;
686
687 if (gpr_reg == GR8_REG)
688 rclass = GR8_REGS;
689
690 else if (gpr_reg == GR9_REG)
691 rclass = GR9_REGS;
692
693 else if (gpr_reg == GR14_REG)
694 rclass = FDPIC_FPTR_REGS;
695
696 else if (gpr_reg == FDPIC_REGNO)
697 rclass = FDPIC_REGS;
698
699 else if ((gpr_reg & 3) == 0)
700 rclass = QUAD_REGS;
701
702 else if ((gpr_reg & 1) == 0)
703 rclass = EVEN_REGS;
704
705 else
706 rclass = GPR_REGS;
707 }
708
709 else if (FPR_P (regno))
710 {
711 int fpr_reg = regno - GPR_FIRST;
712 if ((fpr_reg & 3) == 0)
713 rclass = QUAD_FPR_REGS;
714
715 else if ((fpr_reg & 1) == 0)
716 rclass = FEVEN_REGS;
717
718 else
719 rclass = FPR_REGS;
720 }
721
722 else if (regno == LR_REGNO)
723 rclass = LR_REG;
724
725 else if (regno == LCR_REGNO)
726 rclass = LCR_REG;
727
728 else if (ICC_P (regno))
729 rclass = ICC_REGS;
730
731 else if (FCC_P (regno))
732 rclass = FCC_REGS;
733
734 else if (ICR_P (regno))
735 rclass = ICR_REGS;
736
737 else if (FCR_P (regno))
738 rclass = FCR_REGS;
739
740 else if (ACC_P (regno))
741 {
742 int r = regno - ACC_FIRST;
743 if ((r & 3) == 0)
744 rclass = QUAD_ACC_REGS;
745 else if ((r & 1) == 0)
746 rclass = EVEN_ACC_REGS;
747 else
748 rclass = ACC_REGS;
749 }
750
751 else if (ACCG_P (regno))
752 rclass = ACCG_REGS;
753
754 else
755 rclass = NO_REGS;
756
757 regno_reg_class[regno] = rclass;
758 }
759
760 /* Check for small data option */
761 if (!global_options_set.x_g_switch_value && !TARGET_LIBPIC)
762 g_switch_value = SDATA_DEFAULT_SIZE;
763
764 /* There is no single unaligned SI op for PIC code. Sometimes we
765 need to use ".4byte" and sometimes we need to use ".picptr".
766 See frv_assemble_integer for details. */
767 if (flag_pic || TARGET_FDPIC)
768 targetm.asm_out.unaligned_op.si = 0;
769
770 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
771 target_flags |= MASK_LINKED_FP;
772
773 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
774 target_flags |= MASK_OPTIMIZE_MEMBAR;
775
776 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
777 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
778
779 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
780 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
781
782 init_machine_status = frv_init_machine_status;
783 }
784
785
786 /* Return true if the string NAME begins with PREFIX.  */
787
788 static int
789 frv_string_begins_with (const char *name, const char *prefix)
790 {
791 const int prefix_len = strlen (prefix);
792
793 /* Remember: NAME's length includes the null terminator. */
794 return (strncmp (name, prefix, prefix_len) == 0);
795 }
796
797 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
798
799 static void
800 frv_conditional_register_usage (void)
801 {
802 int i;
803
804 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
805 fixed_regs[i] = call_used_regs[i] = 1;
806
807 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
808 fixed_regs[i] = call_used_regs[i] = 1;
809
810 /* Reserve the registers used for conditional execution. At present, we need
811 1 ICC and 1 ICR register. */
812 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
813 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
814
815 if (TARGET_FIXED_CC)
816 {
817 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
818 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
819 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
820 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
821 }
822
823 if (TARGET_FDPIC)
824 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
825 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
826
827 #if 0
828 /* If -fpic, SDA_BASE_REG is the PIC register. */
829 if (g_switch_value == 0 && !flag_pic)
830 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
831
832 if (!flag_pic)
833 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
834 #endif
835 }
836
837
838 /*
839 * Compute the stack frame layout
840 *
841 * Register setup:
842 * +---------------+-----------------------+-----------------------+
843 * |Register |type |caller-save/callee-save|
844 * +---------------+-----------------------+-----------------------+
845 * |GR0 |Zero register | - |
846 * |GR1 |Stack pointer(SP) | - |
847 * |GR2 |Frame pointer(FP) | - |
848 * |GR3 |Hidden parameter | caller save |
849 * |GR4-GR7 | - | caller save |
850 * |GR8-GR13 |Argument register | caller save |
851 * |GR14-GR15 | - | caller save |
852 * |GR16-GR31 | - | callee save |
853 * |GR32-GR47 | - | caller save |
854 * |GR48-GR63 | - | callee save |
855 * |FR0-FR15 | - | caller save |
856 * |FR16-FR31 | - | callee save |
857 * |FR32-FR47 | - | caller save |
858 * |FR48-FR63 | - | callee save |
859 * +---------------+-----------------------+-----------------------+
860 *
861 * Stack frame setup:
862 * Low
863 * SP-> |-----------------------------------|
864 * | Argument area |
865 * |-----------------------------------|
866 * | Register save area |
867 * |-----------------------------------|
868 * | Local variable save area |
869 * FP-> |-----------------------------------|
870 * | Old FP |
871 * |-----------------------------------|
872 * | Hidden parameter save area |
873 * |-----------------------------------|
874 * | Return address(LR) storage area |
875 * |-----------------------------------|
876 * | Padding for alignment |
877 * |-----------------------------------|
878 * | Register argument area |
879 * OLD SP-> |-----------------------------------|
880 * | Parameter area |
881 * |-----------------------------------|
882 * High
883 *
884 * Argument area/Parameter area:
885 *
886 * When a function is called, this area is used for argument transfer. When
887 * the argument is set up by the caller function, this area is referred to as
888 * the argument area. When the argument is referenced by the callee function,
889 * this area is referred to as the parameter area. The area is allocated when
890  * not all arguments can be placed in the argument registers at the time of
891  * argument transfer.
892 *
893 * Register save area:
894 *
895 * This is a register save area that must be guaranteed for the caller
896 * function. This area is not secured when the register save operation is not
897 * needed.
898 *
899 * Local variable save area:
900 *
901 * This is the area for local variables and temporary variables.
902 *
903 * Old FP:
904 *
905 * This area stores the FP value of the caller function.
906 *
907 * Hidden parameter save area:
908 *
909 * This area stores the start address of the return value storage
910 * area for a struct/union return function.
911 * When a struct/union is used as the return value, the caller
912 * function stores the return value storage area start address in
913  * register GR3 and passes it to the callee function.
914 * The callee function interprets the address stored in the GR3
915 * as the return value storage area start address.
916 * When register GR3 needs to be saved into memory, the callee
917 * function saves it in the hidden parameter save area. This
918 * area is not secured when the save operation is not needed.
919 *
920 * Return address(LR) storage area:
921 *
922 * This area saves the LR. The LR stores the address of a return to the caller
923 * function for the purpose of function calling.
924 *
925 * Argument register area:
926 *
927 * This area saves the argument register. This area is not secured when the
928 * save operation is not needed.
929 *
930 * Argument:
931 *
932 * Arguments, the count of which equals the count of argument registers (6
933 * words), are positioned in registers GR8 to GR13 and delivered to the callee
934 * function. When a struct/union return function is called, the return value
935 * area address is stored in register GR3. Arguments not placed in the
936 * argument registers will be stored in the stack argument area for transfer
937 * purposes. When an 8-byte type argument is to be delivered using registers,
938 * it is divided into two and placed in two registers for transfer. When
939 * argument registers must be saved to memory, the callee function secures an
940 * argument register save area in the stack. In this case, a continuous
941 * argument register save area must be established in the parameter area. The
942 * argument register save area must be allocated as needed to cover the size of
943 * the argument register to be saved. If the function has a variable count of
944 * arguments, it saves all argument registers in the argument register save
945 * area.
946 *
947 * Argument Extension Format:
948 *
949 * When an argument is to be stored in the stack, its type is converted to an
950 * extended type in accordance with the individual argument type. The argument
951 * is freed by the caller function after the return from the callee function is
952 * made.
953 *
954 * +-----------------------+---------------+------------------------+
955 * | Argument Type |Extended Type |Stack Storage Size(byte)|
956 * +-----------------------+---------------+------------------------+
957 * |char |int | 4 |
958 * |signed char |int | 4 |
959 * |unsigned char |int | 4 |
960 * |[signed] short int |int | 4 |
961 * |unsigned short int |int | 4 |
962 * |[signed] int |No extension | 4 |
963 * |unsigned int |No extension | 4 |
964 * |[signed] long int |No extension | 4 |
965 * |unsigned long int |No extension | 4 |
966 * |[signed] long long int |No extension | 8 |
967 * |unsigned long long int |No extension | 8 |
968 * |float |double | 8 |
969 * |double |No extension | 8 |
970 * |long double |No extension | 8 |
971 * |pointer |No extension | 4 |
972 * |struct/union |- | 4 (*1) |
973 * +-----------------------+---------------+------------------------+
974 *
975 * When a struct/union is to be delivered as an argument, the caller copies it
976 * to the local variable area and delivers the address of that area.
977 *
978 * Return Value:
979 *
980 * +-------------------------------+----------------------+
981 * |Return Value Type |Return Value Interface|
982 * +-------------------------------+----------------------+
983 * |void |None |
984 * |[signed|unsigned] char |GR8 |
985 * |[signed|unsigned] short int |GR8 |
986 * |[signed|unsigned] int |GR8 |
987 * |[signed|unsigned] long int |GR8 |
988 * |pointer |GR8 |
989 * |[signed|unsigned] long long int|GR8 & GR9 |
990 * |float |GR8 |
991 * |double |GR8 & GR9 |
992 * |long double |GR8 & GR9 |
993 * |struct/union |(*1) |
994 * +-------------------------------+----------------------+
995 *
996 * When a struct/union is used as the return value, the caller function stores
997 * the start address of the return value storage area into GR3 and then passes
998 * it to the callee function. The callee function interprets GR3 as the start
999 * address of the return value storage area. When this address needs to be
1000 * saved in memory, the callee function secures the hidden parameter save area
1001 * and saves the address in that area.
1002 */
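/*
 * Illustration of the convention above (a hypothetical call, not part of the
 * compiler itself):
 *
 *   extern int f (int, int, int, int, int, int, int);
 *   ... f (1, 2, 3, 4, 5, 6, 7);
 *
 * The first six word-sized arguments travel in GR8..GR13; the seventh is
 * written to the stack argument area.  An 8-byte argument (long long,
 * double) passed in registers is split across two argument registers, as
 * described above.
 */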
1003
1004 frv_stack_t *
1005 frv_stack_info (void)
1006 {
1007 static frv_stack_t info, zero_info;
1008 frv_stack_t *info_ptr = &info;
1009 tree fndecl = current_function_decl;
1010 int varargs_p = 0;
1011 tree cur_arg;
1012 tree next_arg;
1013 int range;
1014 int alignment;
1015 int offset;
1016
1017 /* If we've already calculated the values and reload is complete,
1018 just return now. */
1019 if (frv_stack_cache)
1020 return frv_stack_cache;
1021
1022 /* Zero all fields. */
1023 info = zero_info;
1024
1025 /* Set up the register range information. */
1026 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1027 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1028 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1029 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1030
1031 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1032 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1033 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1034 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1035
1036 info_ptr->regs[STACK_REGS_LR].name = "lr";
1037 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1038 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1039 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1040
1041 info_ptr->regs[STACK_REGS_CC].name = "cc";
1042 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1043 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1044 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1045
1046 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1047 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1048 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1049
1050 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1051 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1052 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1053 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1054 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1055
1056 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1057 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1058 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1059 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1060
1061 info_ptr->regs[STACK_REGS_FP].name = "fp";
1062 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1063 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1064 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1065
1066 /* Determine if this is a stdarg function. If so, allocate space to store
1067 the 6 arguments. */
1068 if (cfun->stdarg)
1069 varargs_p = 1;
1070
1071 else
1072 {
1073 /* Find the last argument, and see if it is __builtin_va_alist. */
1074 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1075 {
1076 next_arg = DECL_CHAIN (cur_arg);
1077 if (next_arg == (tree)0)
1078 {
1079 if (DECL_NAME (cur_arg)
1080 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1081 varargs_p = 1;
1082
1083 break;
1084 }
1085 }
1086 }
1087
1088 /* Iterate over all of the register ranges. */
1089 for (range = 0; range < STACK_REGS_MAX; range++)
1090 {
1091 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1092 int first = reg_ptr->first;
1093 int last = reg_ptr->last;
1094 int size_1word = 0;
1095 int size_2words = 0;
1096 int regno;
1097
1098 /* Calculate which registers need to be saved & save area size. */
1099 switch (range)
1100 {
1101 default:
1102 for (regno = first; regno <= last; regno++)
1103 {
1104 if ((df_regs_ever_live_p (regno)
1105 && !call_used_or_fixed_reg_p (regno))
1106 || (crtl->calls_eh_return
1107 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1108 || (!TARGET_FDPIC && flag_pic
1109 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1110 {
1111 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1112 size_1word += UNITS_PER_WORD;
1113 }
1114 }
1115 break;
1116
1117 /* Calculate whether we need to create a frame after everything else
1118 has been processed. */
1119 case STACK_REGS_FP:
1120 break;
1121
1122 case STACK_REGS_LR:
1123 if (df_regs_ever_live_p (LR_REGNO)
1124 || profile_flag
1125 /* This is set for __builtin_return_address, etc. */
1126 || cfun->machine->frame_needed
1127 || (TARGET_LINKED_FP && frame_pointer_needed)
1128 || (!TARGET_FDPIC && flag_pic
1129 && crtl->uses_pic_offset_table))
1130 {
1131 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1132 size_1word += UNITS_PER_WORD;
1133 }
1134 break;
1135
1136 case STACK_REGS_STDARG:
1137 if (varargs_p)
1138 {
1139 	  /* If this is a stdarg function with a non-variadic
1140 argument split between registers and the stack,
1141 adjust the saved registers downward. */
1142 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1143 / UNITS_PER_WORD);
1144
1145 for (regno = first; regno <= last; regno++)
1146 {
1147 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1148 size_1word += UNITS_PER_WORD;
1149 }
1150
1151 info_ptr->stdarg_size = size_1word;
1152 }
1153 break;
1154
1155 case STACK_REGS_STRUCT:
1156 if (cfun->returns_struct)
1157 {
1158 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1159 size_1word += UNITS_PER_WORD;
1160 }
1161 break;
1162 }
1163
1164
1165 if (size_1word)
1166 {
1167 /* If this is a field, it only takes one word. */
1168 if (reg_ptr->field_p)
1169 size_1word = UNITS_PER_WORD;
1170
1171 /* Determine which register pairs can be saved together. */
1172 else if (reg_ptr->dword_p && TARGET_DWORD)
1173 {
1174 for (regno = first; regno < last; regno += 2)
1175 {
1176 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1177 {
1178 size_2words += 2 * UNITS_PER_WORD;
1179 size_1word -= 2 * UNITS_PER_WORD;
1180 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1181 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1182 }
1183 }
1184 }
1185
1186 reg_ptr->size_1word = size_1word;
1187 reg_ptr->size_2words = size_2words;
1188
1189 if (! reg_ptr->special_p)
1190 {
1191 info_ptr->regs_size_1word += size_1word;
1192 info_ptr->regs_size_2words += size_2words;
1193 }
1194 }
1195 }
1196
1197 /* Set up the sizes of each field in the frame body, making the sizes
1198 of each be divisible by the size of a dword if dword operations might
1199 be used, or the size of a word otherwise. */
1200 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1201
1202 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1203 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1204 + info_ptr->regs_size_1word,
1205 alignment);
1206 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1207
1208 info_ptr->pretend_size = crtl->args.pretend_args_size;
1209
1210 /* Work out the size of the frame, excluding the header. Both the frame
1211 body and register parameter area will be dword-aligned. */
1212 info_ptr->total_size
1213 = (ADDR_ALIGN (info_ptr->parameter_size
1214 + info_ptr->regs_size
1215 + info_ptr->vars_size,
1216 2 * UNITS_PER_WORD)
1217 + ADDR_ALIGN (info_ptr->pretend_size
1218 + info_ptr->stdarg_size,
1219 2 * UNITS_PER_WORD));
1220
1221 /* See if we need to create a frame at all, if so add header area. */
1222 if (info_ptr->total_size > 0
1223 || frame_pointer_needed
1224 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1225 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1226 {
1227 offset = info_ptr->parameter_size;
1228 info_ptr->header_size = 4 * UNITS_PER_WORD;
1229 info_ptr->total_size += 4 * UNITS_PER_WORD;
1230
1231 /* Calculate the offsets to save normal register pairs. */
1232 for (range = 0; range < STACK_REGS_MAX; range++)
1233 {
1234 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1235 if (! reg_ptr->special_p)
1236 {
1237 int first = reg_ptr->first;
1238 int last = reg_ptr->last;
1239 int regno;
1240
1241 for (regno = first; regno <= last; regno++)
1242 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1243 && regno != FRAME_POINTER_REGNUM
1244 && (regno < FIRST_ARG_REGNUM
1245 || regno > LAST_ARG_REGNUM))
1246 {
1247 info_ptr->reg_offset[regno] = offset;
1248 offset += 2 * UNITS_PER_WORD;
1249 }
1250 }
1251 }
1252
1253 /* Calculate the offsets to save normal single registers. */
1254 for (range = 0; range < STACK_REGS_MAX; range++)
1255 {
1256 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1257 if (! reg_ptr->special_p)
1258 {
1259 int first = reg_ptr->first;
1260 int last = reg_ptr->last;
1261 int regno;
1262
1263 for (regno = first; regno <= last; regno++)
1264 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1265 && regno != FRAME_POINTER_REGNUM
1266 && (regno < FIRST_ARG_REGNUM
1267 || regno > LAST_ARG_REGNUM))
1268 {
1269 info_ptr->reg_offset[regno] = offset;
1270 offset += UNITS_PER_WORD;
1271 }
1272 }
1273 }
1274
1275 /* Calculate the offset to save the local variables at. */
1276 offset = ADDR_ALIGN (offset, alignment);
1277 if (info_ptr->vars_size)
1278 {
1279 info_ptr->vars_offset = offset;
1280 offset += info_ptr->vars_size;
1281 }
1282
1283 /* Align header to a dword-boundary. */
1284 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1285
1286 /* Calculate the offsets in the fixed frame. */
1287 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1288 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1289 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1290
1291 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1292 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1293 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1294
1295 if (cfun->returns_struct)
1296 {
1297 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1298 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1299 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1300 }
1301
1302 /* Calculate the offsets to store the arguments passed in registers
1303 for stdarg functions. The register pairs are first and the single
1304 register if any is last. The register save area starts on a
1305 dword-boundary. */
1306 if (info_ptr->stdarg_size)
1307 {
1308 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1309 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1310 int regno;
1311
1312 /* Skip the header. */
1313 offset += 4 * UNITS_PER_WORD;
1314 for (regno = first; regno <= last; regno++)
1315 {
1316 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1317 {
1318 info_ptr->reg_offset[regno] = offset;
1319 offset += 2 * UNITS_PER_WORD;
1320 }
1321 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1322 {
1323 info_ptr->reg_offset[regno] = offset;
1324 offset += UNITS_PER_WORD;
1325 }
1326 }
1327 }
1328 }
1329
1330 if (reload_completed)
1331 frv_stack_cache = info_ptr;
1332
1333 return info_ptr;
1334 }
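/* Informally, the computation above amounts to

     total_size = ADDR_ALIGN (parameter_size + regs_size + vars_size,
                              2 * UNITS_PER_WORD)
                  + ADDR_ALIGN (pretend_size + stdarg_size,
                                2 * UNITS_PER_WORD)

   plus a 4-word header whenever a frame is created at all.  */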
1335
1336
1337 /* Print the information about the frv stack offsets, etc. when debugging. */
1338
1339 void
1340 frv_debug_stack (frv_stack_t *info)
1341 {
1342 int range;
1343
1344 if (!info)
1345 info = frv_stack_info ();
1346
1347 fprintf (stderr, "\nStack information for function %s:\n",
1348 ((current_function_decl && DECL_NAME (current_function_decl))
1349 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1350 : "<unknown>"));
1351
1352 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1353 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1354 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1355 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1356 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1357
1358 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1359 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1360 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1361 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1362
1363 for (range = 0; range < STACK_REGS_MAX; range++)
1364 {
1365 frv_stack_regs_t *regs = &(info->regs[range]);
1366 if ((regs->size_1word + regs->size_2words) > 0)
1367 {
1368 int first = regs->first;
1369 int last = regs->last;
1370 int regno;
1371
1372 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1373 regs->name, regs->size_1word + regs->size_2words,
1374 regs->size_1word, regs->size_2words);
1375
1376 for (regno = first; regno <= last; regno++)
1377 {
1378 if (info->save_p[regno] == REG_SAVE_1WORD)
1379 fprintf (stderr, " %s (%d)", reg_names[regno],
1380 info->reg_offset[regno]);
1381
1382 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1383 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1384 reg_names[regno+1], info->reg_offset[regno]);
1385 }
1386
1387 fputc ('\n', stderr);
1388 }
1389 }
1390
1391 fflush (stderr);
1392 }
1393
1394
1395
1396
1397 /* Used during final to control the packing of insns. The value is
1398 1 if the current instruction should be packed with the next one,
1399 0 if it shouldn't or -1 if packing is disabled altogether. */
1400
1401 static int frv_insn_packing_flag;
1402
1403 /* True if the current function contains a far jump. */
1404
1405 static int
1406 frv_function_contains_far_jump (void)
1407 {
1408 rtx_insn *insn = get_insns ();
1409 while (insn != NULL
1410 && !(JUMP_P (insn)
1411 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1412 insn = NEXT_INSN (insn);
1413 return (insn != NULL);
1414 }
1415
1416 /* For the FRV, this function makes sure that a function with far jumps
1417 will return correctly. It also does the VLIW packing. */
1418
1419 static void
1420 frv_function_prologue (FILE *file)
1421 {
1422 /* If no frame was created, check whether the function uses a call
1423 instruction to implement a far jump. If so, save the link in gr3 and
1424 replace all returns to LR with returns to GR3. GR3 is used because it
1425      is call-clobbered, because it is not available to the register allocator,
1426 and because all functions that take a hidden argument pointer will have
1427 a stack frame. */
1428 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1429 {
1430 rtx_insn *insn;
1431
1432 /* Just to check that the above comment is true. */
1433 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1434
1435 /* Generate the instruction that saves the link register. */
1436 fprintf (file, "\tmovsg lr,gr3\n");
1437
1438 /* Replace the LR with GR3 in *return_internal patterns. The insn
1439 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1440 simply emit a different assembly directive because bralr and jmpl
1441 execute in different units. */
1442 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1443 if (JUMP_P (insn))
1444 {
1445 rtx pattern = PATTERN (insn);
1446 if (GET_CODE (pattern) == PARALLEL
1447 && XVECLEN (pattern, 0) >= 2
1448 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1449 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1450 {
1451 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1452 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1453 SET_REGNO (address, GPR_FIRST + 3);
1454 }
1455 }
1456 }
1457
1458 frv_pack_insns ();
1459
1460 /* Allow the garbage collector to free the nops created by frv_reorg. */
1461 memset (frv_nops, 0, sizeof (frv_nops));
1462 }
1463
1464
1465 /* Return the next available temporary register in a given class. */
1466
1467 static rtx
1468 frv_alloc_temp_reg (
1469 frv_tmp_reg_t *info, /* which registers are available */
1470 enum reg_class rclass, /* register class desired */
1471 machine_mode mode, /* mode to allocate register with */
1472 int mark_as_used, /* register not available after allocation */
1473 int no_abort) /* return NULL instead of aborting */
1474 {
1475 int regno = info->next_reg[ (int)rclass ];
1476 int orig_regno = regno;
1477   HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1478 int i, nr;
1479
1480 for (;;)
1481 {
1482 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1483 && TEST_HARD_REG_BIT (info->regs, regno))
1484 break;
1485
1486 if (++regno >= FIRST_PSEUDO_REGISTER)
1487 regno = 0;
1488 if (regno == orig_regno)
1489 {
1490 gcc_assert (no_abort);
1491 return NULL_RTX;
1492 }
1493 }
1494
1495 nr = hard_regno_nregs (regno, mode);
1496 info->next_reg[ (int)rclass ] = regno + nr;
1497
1498 if (mark_as_used)
1499 for (i = 0; i < nr; i++)
1500 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1501
1502 return gen_rtx_REG (mode, regno);
1503 }
1504
1505
1506 /* Return an rtx with the value OFFSET, which will either be a register or a
1507 signed 12-bit integer. It can be used as the second operand in an "add"
1508 instruction, or as the index in a load or store.
1509
1510 The function returns a constant rtx if OFFSET is small enough, otherwise
1511 it loads the constant into register OFFSET_REGNO and returns that. */
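/* For instance, frv_frame_offset_rtx (100) simply yields (const_int 100),
   while an offset such as 0x12345 is outside the signed 12-bit range and is
   instead built in OFFSET_REGNO (a single move for 16-bit values, otherwise
   a high/lo_sum pair), with that register returned.  */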
1512 static rtx
1513 frv_frame_offset_rtx (int offset)
1514 {
1515 rtx offset_rtx = GEN_INT (offset);
1516 if (IN_RANGE (offset, -2048, 2047))
1517 return offset_rtx;
1518 else
1519 {
1520 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1521 if (IN_RANGE (offset, -32768, 32767))
1522 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1523 else
1524 {
1525 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1526 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1527 }
1528 return reg_rtx;
1529 }
1530 }
1531
1532 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
1533    prologue and epilogue use such expressions to access the stack.  */
1534 static rtx
1535 frv_frame_mem (machine_mode mode, rtx base, int offset)
1536 {
1537 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1538 base,
1539 frv_frame_offset_rtx (offset)));
1540 }
1541
1542 /* Generate a frame-related expression:
1543
1544 (set REG (mem (plus (sp) (const_int OFFSET)))).
1545
1546 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1547 instructions. Marking the expressions as frame-related is superfluous if
1548 the note contains just a single set. But if the note contains a PARALLEL
1549 or SEQUENCE that has several sets, each set must be individually marked
1550 as frame-related. */
1551 static rtx
1552 frv_dwarf_store (rtx reg, int offset)
1553 {
1554 rtx set = gen_rtx_SET (gen_rtx_MEM (GET_MODE (reg),
1555 plus_constant (Pmode, stack_pointer_rtx,
1556 offset)),
1557 reg);
1558 RTX_FRAME_RELATED_P (set) = 1;
1559 return set;
1560 }
1561
1562 /* Emit a frame-related instruction whose pattern is PATTERN. The
1563 instruction is the last in a sequence that cumulatively performs the
1564 operation described by DWARF_PATTERN. The instruction is marked as
1565 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1566 DWARF_PATTERN. */
1567 static void
1568 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1569 {
1570 rtx insn = emit_insn (pattern);
1571 RTX_FRAME_RELATED_P (insn) = 1;
1572 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1573 dwarf_pattern,
1574 REG_NOTES (insn));
1575 }
1576
1577 /* Emit instructions that transfer REG to or from the memory location (sp +
1578 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1579 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1580 function to store registers and only the epilogue uses it to load them.
1581
1582 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1583 The generated instruction will use BASE as its base register. BASE may
1584 simply be the stack pointer, but if several accesses are being made to a
1585 region far away from the stack pointer, it may be more efficient to set
1586 up a temporary instead.
1587
1588 Store instructions will be frame-related and will be annotated with the
1589 overall effect of the store. Load instructions will be followed by a
1590 (use) to prevent later optimizations from zapping them.
1591
1592 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1593 as a temporary in such cases. */
1594 static void
frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1596 {
1597 machine_mode mode = GET_MODE (reg);
1598 rtx mem = frv_frame_mem (mode,
1599 accessor->base,
1600 stack_offset - accessor->base_offset);
1601
1602 if (accessor->op == FRV_LOAD)
1603 {
1604 if (SPR_P (REGNO (reg)))
1605 {
1606 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1607 emit_insn (gen_rtx_SET (temp, mem));
1608 emit_insn (gen_rtx_SET (reg, temp));
1609 }
1610 else
1611 {
1612 /* We cannot use reg+reg addressing for DImode access. */
1613 if (mode == DImode
1614 && GET_CODE (XEXP (mem, 0)) == PLUS
1615 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1616 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1617 {
1618 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1619
1620 emit_move_insn (temp,
1621 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1622 XEXP (XEXP (mem, 0), 1)));
1623 mem = gen_rtx_MEM (DImode, temp);
1624 }
1625 emit_insn (gen_rtx_SET (reg, mem));
1626 }
1627 emit_use (reg);
1628 }
1629 else
1630 {
1631 if (SPR_P (REGNO (reg)))
1632 {
1633 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1634 emit_insn (gen_rtx_SET (temp, reg));
1635 frv_frame_insn (gen_rtx_SET (mem, temp),
1636 frv_dwarf_store (reg, stack_offset));
1637 }
1638 else if (mode == DImode)
1639 {
1640 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1641 with a separate save for each register. */
1642 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1643 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1644 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1645 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1646
1647 /* Also we cannot use reg+reg addressing. */
1648 if (GET_CODE (XEXP (mem, 0)) == PLUS
1649 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1650 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1651 {
1652 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1653 emit_move_insn (temp,
1654 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1655 XEXP (XEXP (mem, 0), 1)));
1656 mem = gen_rtx_MEM (DImode, temp);
1657 }
1658
1659 frv_frame_insn (gen_rtx_SET (mem, reg),
1660 gen_rtx_PARALLEL (VOIDmode,
1661 gen_rtvec (2, set1, set2)));
1662 }
1663 else
1664 frv_frame_insn (gen_rtx_SET (mem, reg),
1665 frv_dwarf_store (reg, stack_offset));
1666 }
1667 }
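
/* For illustration, a DImode save of the register pair starting at hard
register N to stack offset 16 gets a note of the form
(parallel [(set (mem:SI (plus (reg sp) (const_int 16))) (reg:SI N))
(set (mem:SI (plus (reg sp) (const_int 20))) (reg:SI N+1))])
so that each 4-byte save is recorded separately for dwarf2. */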
1668
1669 /* A function that uses frv_frame_access to transfer a group of registers to
1670 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1671 is the stack information generated by frv_stack_info, and REG_SET is the
1672 number of the register set to transfer. */
1673 static void
frv_frame_access_multi (frv_frame_accessor_t *accessor,
1675 frv_stack_t *info,
1676 int reg_set)
1677 {
1678 frv_stack_regs_t *regs_info;
1679 int regno;
1680
1681 regs_info = &info->regs[reg_set];
1682 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1683 if (info->save_p[regno])
1684 frv_frame_access (accessor,
1685 info->save_p[regno] == REG_SAVE_2WORDS
1686 ? gen_rtx_REG (DImode, regno)
1687 : gen_rtx_REG (SImode, regno),
1688 info->reg_offset[regno]);
1689 }
1690
1691 /* Save or restore callee-saved registers that are kept outside the frame
1692 header. The function saves the registers if OP is FRV_STORE and restores
1693 them if OP is FRV_LOAD. INFO is the stack information generated by
1694 frv_stack_info. */
1695 static void
frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1697 {
1698 frv_frame_accessor_t accessor;
1699
1700 accessor.op = op;
1701 accessor.base = stack_pointer_rtx;
1702 accessor.base_offset = 0;
1703 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1704 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1705 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1706 }
1707
1708
1709 /* Called after register allocation to add any instructions needed for the
1710 prologue. Using a prologue insn is favored compared to putting all of the
1711 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1712 it allows the scheduler to intermix instructions with the saves of
the callee-saved registers. In some cases, it might be necessary
1714 to emit a barrier instruction as the last insn to prevent such
1715 scheduling.
1716
1717 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1718 so that the debug info generation code can handle them properly. */
1719 void
frv_expand_prologue (void)
1721 {
1722 frv_stack_t *info = frv_stack_info ();
1723 rtx sp = stack_pointer_rtx;
1724 rtx fp = frame_pointer_rtx;
1725 frv_frame_accessor_t accessor;
1726
1727 if (TARGET_DEBUG_STACK)
1728 frv_debug_stack (info);
1729
1730 if (flag_stack_usage_info)
1731 current_function_static_stack_size = info->total_size;
1732
1733 if (info->total_size == 0)
1734 return;
1735
1736 /* We're interested in three areas of the frame here:
1737
1738 A: the register save area
1739 B: the old FP
1740 C: the header after B
1741
1742 If the frame pointer isn't used, we'll have to set up A, B and C
1743 using the stack pointer. If the frame pointer is used, we'll access
1744 them as follows:
1745
1746 A: set up using sp
1747 B: set up using sp or a temporary (see below)
1748 C: set up using fp
1749
1750 We set up B using the stack pointer if the frame is small enough.
1751 Otherwise, it's more efficient to copy the old stack pointer into a
1752 temporary and use that.
1753
1754 Note that it's important to make sure the prologue and epilogue use the
1755 same registers to access A and C, since doing otherwise will confuse
1756 the aliasing code. */
1757
1758 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1759 isn't used, the same method will serve for C. */
1760 accessor.op = FRV_STORE;
1761 if (frame_pointer_needed && info->total_size > 2048)
1762 {
1763 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1764 accessor.base_offset = info->total_size;
1765 emit_insn (gen_movsi (accessor.base, sp));
1766 }
1767 else
1768 {
1769 accessor.base = stack_pointer_rtx;
1770 accessor.base_offset = 0;
1771 }
1772
1773 /* Allocate the stack space. */
1774 {
1775 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1776 rtx dwarf_offset = GEN_INT (-info->total_size);
1777
1778 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1779 gen_rtx_SET (sp, gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1780 }
1781
1782 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1783 and point the new one to that location. */
1784 if (frame_pointer_needed)
1785 {
1786 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1787
1788 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1789 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1790 pointer. */
1791 rtx asm_src = plus_constant (Pmode, accessor.base,
1792 fp_offset - accessor.base_offset);
1793 rtx dwarf_src = plus_constant (Pmode, sp, fp_offset);
1794
1795 /* Store the old frame pointer at (sp + FP_OFFSET). */
1796 frv_frame_access (&accessor, fp, fp_offset);
1797
1798 /* Set up the new frame pointer. */
1799 frv_frame_insn (gen_rtx_SET (fp, asm_src),
1800 gen_rtx_SET (fp, dwarf_src));
1801
1802 /* Access region C from the frame pointer. */
1803 accessor.base = fp;
1804 accessor.base_offset = fp_offset;
1805 }
1806
1807 /* Set up region C. */
1808 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1809 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1810 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1811
1812 /* Set up region A. */
1813 frv_frame_access_standard_regs (FRV_STORE, info);
1814
1815 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1816 scheduler from moving loads before the stores saving the registers. */
1817 if (info->stdarg_size > 0)
1818 emit_insn (gen_blockage ());
1819
1820 /* Set up pic register/small data register for this function. */
1821 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1822 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1823 gen_rtx_REG (Pmode, LR_REGNO),
1824 gen_rtx_REG (SImode, OFFSET_REGNO)));
1825 }
1826
1827
1828 /* Under frv, all of the work is done via frv_expand_epilogue, but
1829 this function provides a convenient place to do cleanup. */
1830
1831 static void
frv_function_epilogue (FILE *)
1833 {
1834 frv_stack_cache = (frv_stack_t *)0;
1835
1836 /* Zap last used registers for conditional execution. */
1837 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1838
1839 /* Release the bitmap of created insns. */
1840 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1841 }
1842
1843
/* Called after register allocation to add any instructions needed for the
epilogue. Using an epilogue insn is favored compared to putting all of the
instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
it allows the scheduler to intermix instructions with the restores of
the callee-saved registers. In some cases, it might be necessary
to emit a barrier instruction as the last insn to prevent such
scheduling. */
1851
1852 void
frv_expand_epilogue (bool emit_return)
1854 {
1855 frv_stack_t *info = frv_stack_info ();
1856 rtx fp = frame_pointer_rtx;
1857 rtx sp = stack_pointer_rtx;
1858 rtx return_addr;
1859 int fp_offset;
1860
1861 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1862
1863 /* Restore the stack pointer to its original value if alloca or the like
1864 is used. */
1865 if (! crtl->sp_is_unchanging)
1866 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1867
1868 /* Restore the callee-saved registers that were used in this function. */
1869 frv_frame_access_standard_regs (FRV_LOAD, info);
1870
1871 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1872 no return instruction should be emitted. */
1873 if (info->save_p[LR_REGNO])
1874 {
1875 int lr_offset;
1876 rtx mem;
1877
1878 /* Use the same method to access the link register's slot as we did in
1879 the prologue. In other words, use the frame pointer if available,
1880 otherwise use the stack pointer.
1881
1882 LR_OFFSET is the offset of the link register's slot from the start
1883 of the frame and MEM is a memory rtx for it. */
1884 lr_offset = info->reg_offset[LR_REGNO];
1885 if (frame_pointer_needed)
1886 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1887 else
1888 mem = frv_frame_mem (Pmode, sp, lr_offset);
1889
1890 /* Load the old link register into a GPR. */
1891 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1892 emit_insn (gen_rtx_SET (return_addr, mem));
1893 }
1894 else
1895 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1896
1897 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1898 the load is preserved. */
1899 if (frame_pointer_needed)
1900 {
1901 emit_insn (gen_rtx_SET (fp, gen_rtx_MEM (Pmode, fp)));
1902 emit_use (fp);
1903 }
1904
1905 /* Deallocate the stack frame. */
1906 if (info->total_size != 0)
1907 {
1908 rtx offset = frv_frame_offset_rtx (info->total_size);
1909 emit_insn (gen_stack_adjust (sp, sp, offset));
1910 }
1911
1912 /* If this function uses eh_return, add the final stack adjustment now. */
1913 if (crtl->calls_eh_return)
1914 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1915
1916 if (emit_return)
1917 emit_jump_insn (gen_epilogue_return (return_addr));
1918 else
1919 {
1920 rtx lr = return_addr;
1921
1922 if (REGNO (return_addr) != LR_REGNO)
1923 {
1924 lr = gen_rtx_REG (Pmode, LR_REGNO);
1925 emit_move_insn (lr, return_addr);
1926 }
1927
1928 emit_use (lr);
1929 }
1930 }
1931
1932
1933 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
1934
1935 static void
frv_asm_output_mi_thunk (FILE *file,
1937 tree thunk_fndecl ATTRIBUTE_UNUSED,
1938 HOST_WIDE_INT delta,
1939 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1940 tree function)
1941 {
1942 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1943 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
1944 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
1945 const char *name_jmp = reg_names[JUMP_REGNO];
1946 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
1947
1948 assemble_start_function (thunk_fndecl, fnname);
1949
1950 /* Do the add using an addi if possible. */
1951 if (IN_RANGE (delta, -2048, 2047))
1952 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
1953 else
1954 {
1955 const char *const name_add = reg_names[TEMP_REGNO];
1956 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1957 parallel, delta, name_add);
1958 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
1959 delta, name_add);
1960 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
1961 }
1962
1963 if (TARGET_FDPIC)
1964 {
1965 const char *name_pic = reg_names[FDPIC_REGNO];
1966 name_jmp = reg_names[FDPIC_FPTR_REGNO];
1967
1968 if (flag_pic != 1)
1969 {
1970 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
1971 assemble_name (file, name_func);
1972 fprintf (file, "),%s\n", name_jmp);
1973
1974 fprintf (file, "\tsetlo #gotofffuncdesclo(");
1975 assemble_name (file, name_func);
1976 fprintf (file, "),%s\n", name_jmp);
1977
1978 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
1979 }
1980 else
1981 {
1982 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
1983 assemble_name (file, name_func);
1984 fprintf (file, "\t)), %s\n", name_jmp);
1985 }
1986 }
1987 else if (!flag_pic)
1988 {
1989 fprintf (file, "\tsethi%s #hi(", parallel);
1990 assemble_name (file, name_func);
1991 fprintf (file, "),%s\n", name_jmp);
1992
1993 fprintf (file, "\tsetlo #lo(");
1994 assemble_name (file, name_func);
1995 fprintf (file, "),%s\n", name_jmp);
1996 }
1997 else
1998 {
1999 /* Use JUMP_REGNO as a temporary PIC register. */
2000 const char *name_lr = reg_names[LR_REGNO];
2001 const char *name_gppic = name_jmp;
2002 const char *name_tmp = reg_names[TEMP_REGNO];
2003
2004 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2005 fprintf (file, "\tcall 1f\n");
2006 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2007 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2008 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2009 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2010 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2011
2012 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2013 assemble_name (file, name_func);
2014 fprintf (file, "),%s\n", name_tmp);
2015
2016 fprintf (file, "\tsetlo #gprello(");
2017 assemble_name (file, name_func);
2018 fprintf (file, "),%s\n", name_tmp);
2019
2020 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2021 }
2022
2023 /* Jump to the function address. */
2024 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2025 assemble_end_function (thunk_fndecl, fnname);
2026 }
2027
2028
2029
/* On frv, create a frame whenever the function needs stack space. */
2031
2032 static bool
frv_frame_pointer_required (void)
2034 {
/* If we are forgoing the usual linkage requirements, we only need
a frame pointer if the stack pointer might change. */
2037 if (!TARGET_LINKED_FP)
2038 return !crtl->sp_is_unchanging;
2039
2040 if (! crtl->is_leaf)
2041 return true;
2042
2043 if (get_frame_size () != 0)
2044 return true;
2045
2046 if (cfun->stdarg)
2047 return true;
2048
2049 if (!crtl->sp_is_unchanging)
2050 return true;
2051
2052 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2053 return true;
2054
2055 if (profile_flag)
2056 return true;
2057
2058 if (cfun->machine->frame_needed)
2059 return true;
2060
2061 return false;
2062 }
2063
2064
2065 /* Worker function for TARGET_CAN_ELIMINATE. */
2066
2067 bool
frv_can_eliminate (const int from, const int to)
2069 {
2070 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2071 ? ! frame_pointer_needed
2072 : true);
2073 }
2074
2075 /* This function returns the initial difference between the specified
2076 pair of registers. */
2077
2078 /* See frv_stack_info for more details on the frv stack frame. */
2079
2080 int
frv_initial_elimination_offset (int from, int to)
2082 {
2083 frv_stack_t *info = frv_stack_info ();
2084 int ret = 0;
2085
2086 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2087 ret = info->total_size - info->pretend_size;
2088
2089 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2090 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2091
2092 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2093 ret = (info->total_size
2094 - info->reg_offset[FRAME_POINTER_REGNUM]
2095 - info->pretend_size);
2096
2097 else
2098 gcc_unreachable ();
2099
2100 if (TARGET_DEBUG_STACK)
2101 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2102 reg_names [from], reg_names[to], ret);
2103
2104 return ret;
2105 }
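
/* A worked example with hypothetical sizes: if total_size is 96,
pretend_size is 0 and reg_offset[FRAME_POINTER_REGNUM] is 32, then
AP -> SP is 96, FP -> SP is 32 and AP -> FP is 96 - 32 = 64; eliminating
AP to FP and then FP to SP (64 + 32) matches eliminating AP to SP
directly (96), as it must. */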
2106
2107
2108 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2109
2110 static void
frv_setup_incoming_varargs (cumulative_args_t cum_v,
2112 const function_arg_info &arg,
2113 int *pretend_size,
2114 int second_time)
2115 {
2116 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2117
2118 if (TARGET_DEBUG_ARG)
2119 fprintf (stderr,
2120 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2121 *cum, GET_MODE_NAME (arg.mode), *pretend_size, second_time);
2122 }
2123
2124
2125 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2126
2127 static rtx
frv_expand_builtin_saveregs (void)
2129 {
2130 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2131
2132 if (TARGET_DEBUG_ARG)
2133 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2134 offset);
2135
2136 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2137 }
2138
2139
2140 /* Expand __builtin_va_start to do the va_start macro. */
2141
2142 static void
frv_expand_builtin_va_start (tree valist, rtx nextarg)
2144 {
2145 tree t;
2146 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2147
2148 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2149 GEN_INT (UNITS_PER_WORD * num));
2150
2151 if (TARGET_DEBUG_ARG)
2152 {
2153 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2154 crtl->args.info, num);
2155
2156 debug_rtx (nextarg);
2157 }
2158
2159 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2160 fold_convert (TREE_TYPE (valist),
2161 make_tree (sizetype, nextarg)));
2162 TREE_SIDE_EFFECTS (t) = 1;
2163
2164 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2165 }
2166
2167
2168 /* Expand a block move operation, and return 1 if successful. Return 0
2169 if we should let the compiler generate normal code.
2170
2171 operands[0] is the destination
2172 operands[1] is the source
2173 operands[2] is the length
2174 operands[3] is the alignment */
2175
2176 /* Maximum number of loads to do before doing the stores */
2177 #ifndef MAX_MOVE_REG
2178 #define MAX_MOVE_REG 4
2179 #endif
2180
2181 /* Maximum number of total loads to do. */
2182 #ifndef TOTAL_MOVE_REG
2183 #define TOTAL_MOVE_REG 8
2184 #endif
2185
2186 int
frv_expand_block_move (rtx operands[])
2188 {
2189 rtx orig_dest = operands[0];
2190 rtx orig_src = operands[1];
2191 rtx bytes_rtx = operands[2];
2192 rtx align_rtx = operands[3];
2193 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2194 int align;
2195 int bytes;
2196 int offset;
2197 int num_reg;
2198 int i;
2199 rtx src_reg;
2200 rtx dest_reg;
2201 rtx src_addr;
2202 rtx dest_addr;
2203 rtx src_mem;
2204 rtx dest_mem;
2205 rtx tmp_reg;
2206 rtx stores[MAX_MOVE_REG];
2207 int move_bytes;
2208 machine_mode mode;
2209
2210 /* If this is not a fixed size move, just call memcpy. */
2211 if (! constp)
2212 return FALSE;
2213
2214 /* This should be a fixed size alignment. */
2215 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2216
2217 align = INTVAL (align_rtx);
2218
2219 /* Anything to move? */
2220 bytes = INTVAL (bytes_rtx);
2221 if (bytes <= 0)
2222 return TRUE;
2223
/* Don't support really large moves. */
2225 if (bytes > TOTAL_MOVE_REG*align)
2226 return FALSE;
2227
2228 /* Move the address into scratch registers. */
2229 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2230 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2231
2232 num_reg = offset = 0;
2233 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2234 {
2235 /* Calculate the correct offset for src/dest. */
2236 if (offset == 0)
2237 {
2238 src_addr = src_reg;
2239 dest_addr = dest_reg;
2240 }
2241 else
2242 {
2243 src_addr = plus_constant (Pmode, src_reg, offset);
2244 dest_addr = plus_constant (Pmode, dest_reg, offset);
2245 }
2246
2247 /* Generate the appropriate load and store, saving the stores
2248 for later. */
2249 if (bytes >= 4 && align >= 4)
2250 mode = SImode;
2251 else if (bytes >= 2 && align >= 2)
2252 mode = HImode;
2253 else
2254 mode = QImode;
2255
2256 move_bytes = GET_MODE_SIZE (mode);
2257 tmp_reg = gen_reg_rtx (mode);
2258 src_mem = change_address (orig_src, mode, src_addr);
2259 dest_mem = change_address (orig_dest, mode, dest_addr);
2260 emit_insn (gen_rtx_SET (tmp_reg, src_mem));
2261 stores[num_reg++] = gen_rtx_SET (dest_mem, tmp_reg);
2262
2263 if (num_reg >= MAX_MOVE_REG)
2264 {
2265 for (i = 0; i < num_reg; i++)
2266 emit_insn (stores[i]);
2267 num_reg = 0;
2268 }
2269 }
2270
2271 for (i = 0; i < num_reg; i++)
2272 emit_insn (stores[i]);
2273
2274 return TRUE;
2275 }
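
/* A worked example: a 10-byte copy with 4-byte alignment is split into
SImode + SImode + HImode pieces (4 + 4 + 2 bytes). The three loads are
emitted first and the three stores afterwards, since MAX_MOVE_REG (4)
is never reached. A copy larger than TOTAL_MOVE_REG * align (32 bytes
here) is rejected above and left to the normal memcpy path. */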
2276
2277
2278 /* Expand a block clear operation, and return 1 if successful. Return 0
2279 if we should let the compiler generate normal code.
2280
2281 operands[0] is the destination
2282 operands[1] is the length
2283 operands[3] is the alignment */
2284
2285 int
frv_expand_block_clear (rtx operands[])
2287 {
2288 rtx orig_dest = operands[0];
2289 rtx bytes_rtx = operands[1];
2290 rtx align_rtx = operands[3];
2291 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2292 int align;
2293 int bytes;
2294 int offset;
2295 rtx dest_reg;
2296 rtx dest_addr;
2297 rtx dest_mem;
2298 int clear_bytes;
2299 machine_mode mode;
2300
/* If this is not a fixed size clear, just call memset. */
2302 if (! constp)
2303 return FALSE;
2304
2305 /* This should be a fixed size alignment. */
2306 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2307
2308 align = INTVAL (align_rtx);
2309
2310 /* Anything to move? */
2311 bytes = INTVAL (bytes_rtx);
2312 if (bytes <= 0)
2313 return TRUE;
2314
/* Don't support really large clears. */
2316 if (bytes > TOTAL_MOVE_REG*align)
2317 return FALSE;
2318
2319 /* Move the address into a scratch register. */
2320 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2321
2322 offset = 0;
2323 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2324 {
2325 /* Calculate the correct offset for src/dest. */
2326 dest_addr = ((offset == 0)
2327 ? dest_reg
2328 : plus_constant (Pmode, dest_reg, offset));
2329
2330 /* Generate the appropriate store of gr0. */
2331 if (bytes >= 4 && align >= 4)
2332 mode = SImode;
2333 else if (bytes >= 2 && align >= 2)
2334 mode = HImode;
2335 else
2336 mode = QImode;
2337
2338 clear_bytes = GET_MODE_SIZE (mode);
2339 dest_mem = change_address (orig_dest, mode, dest_addr);
2340 emit_insn (gen_rtx_SET (dest_mem, const0_rtx));
2341 }
2342
2343 return TRUE;
2344 }
2345
2346
/* The operand vector of the insn currently being output, used when
adding modifiers to its assembler code. */
2349
2350 static rtx *frv_insn_operands;
2351
/* The following function appends the assembler code suffix .p to the
current insn's opcode when it is necessary. */
2354
2355 const char *
frv_asm_output_opcode (FILE *f, const char *ptr)
2357 {
2358 int c;
2359
2360 if (frv_insn_packing_flag <= 0)
2361 return ptr;
2362
2363 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2364 {
2365 c = *ptr++;
2366 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2367 || (*ptr >= 'A' && *ptr <= 'Z')))
2368 {
2369 int letter = *ptr++;
2370
2371 c = atoi (ptr);
2372 frv_print_operand (f, frv_insn_operands [c], letter);
2373 while ((c = *ptr) >= '0' && c <= '9')
2374 ptr++;
2375 }
2376 else
2377 fputc (c, f);
2378 }
2379
2380 fprintf (f, ".p");
2381
2382 return ptr;
2383 }
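
/* For example, when the packing flag is set an insn whose output template
starts with "add" has its opcode printed as "add.p"; the operand list after
the first whitespace is returned to the caller and output as usual. */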
2384
2385 /* Set up the packing bit for the current output insn. Note that this
2386 function is not called for asm insns. */
2387
2388 void
frv_final_prescan_insn (rtx_insn *insn, rtx *opvec,
2390 int noperands ATTRIBUTE_UNUSED)
2391 {
2392 if (INSN_P (insn))
2393 {
2394 if (frv_insn_packing_flag >= 0)
2395 {
2396 frv_insn_operands = opvec;
2397 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2398 }
2399 else if (recog_memoized (insn) >= 0
2400 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2401 /* Packing optimizations have been disabled, but INSN can only
2402 be issued in M1. Insert an mnop in M0. */
2403 fprintf (asm_out_file, "\tmnop.p\n");
2404 }
2405 }
2406
2407
2408
2409 /* A C expression whose value is RTL representing the address in a stack frame
2410 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2411 an RTL expression for the address of the stack frame itself.
2412
2413 If you don't define this macro, the default is to return the value of
2414 FRAMEADDR--that is, the stack frame address is also the address of the stack
2415 word that points to the previous frame. */
2416
2417 /* The default is correct, but we need to make sure the frame gets created. */
2418 rtx
frv_dynamic_chain_address (rtx frame)
2420 {
2421 cfun->machine->frame_needed = 1;
2422 return frame;
2423 }
2424
2425
2426 /* A C expression whose value is RTL representing the value of the return
2427 address for the frame COUNT steps up from the current frame, after the
2428 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2429 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2430 defined.
2431
2432 The value of the expression must always be the correct address when COUNT is
zero, but may be `NULL_RTX' if there is no way to determine the return
2434 address of other frames. */
2435
2436 rtx
frv_return_addr_rtx (int count, rtx frame)
2438 {
2439 if (count != 0)
2440 return const0_rtx;
2441 cfun->machine->frame_needed = 1;
2442 return gen_rtx_MEM (Pmode, plus_constant (Pmode, frame, 8));
2443 }
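
/* In other words, __builtin_return_address (0) is loaded from the slot
8 bytes into the frame, which holds the saved return address, while any
nonzero COUNT simply yields constant zero. */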
2444
2445 /* Given a memory reference MEMREF, interpret the referenced memory as
2446 an array of MODE values, and return a reference to the element
2447 specified by INDEX. Assume that any pre-modification implicit in
2448 MEMREF has already happened.
2449
2450 MEMREF must be a legitimate operand for modes larger than SImode.
2451 frv_legitimate_address_p forbids register+register addresses, which
2452 this function cannot handle. */
2453 rtx
frv_index_memory (rtx memref, machine_mode mode, int index)
2455 {
2456 rtx base = XEXP (memref, 0);
2457 if (GET_CODE (base) == PRE_MODIFY)
2458 base = XEXP (base, 0);
2459 return change_address (memref, mode,
2460 plus_constant (Pmode, base,
2461 index * GET_MODE_SIZE (mode)));
2462 }
2463
2464
2465 /* Print a memory address as an operand to reference that memory location. */
2466 static void
frv_print_operand_address (FILE * stream, machine_mode /* mode */, rtx x)
2468 {
2469 if (GET_CODE (x) == MEM)
2470 x = XEXP (x, 0);
2471
2472 switch (GET_CODE (x))
2473 {
2474 case REG:
2475 fputs (reg_names [ REGNO (x)], stream);
2476 return;
2477
2478 case CONST_INT:
2479 fprintf (stream, "%ld", (long) INTVAL (x));
2480 return;
2481
2482 case SYMBOL_REF:
2483 assemble_name (stream, XSTR (x, 0));
2484 return;
2485
2486 case LABEL_REF:
2487 case CONST:
2488 output_addr_const (stream, x);
2489 return;
2490
2491 case PLUS:
2492 /* Poorly constructed asm statements can trigger this alternative.
2493 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2494 frv_print_operand_memory_reference (stream, x, 0);
2495 return;
2496
2497 default:
2498 break;
2499 }
2500
2501 fatal_insn ("bad insn to frv_print_operand_address:", x);
2502 }
2503
2504
2505 static void
frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2507 {
2508 int regno = true_regnum (x);
2509 if (GPR_P (regno))
2510 fputs (reg_names[regno], stream);
2511 else
2512 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2513 }
2514
2515 /* Print a memory reference suitable for the ld/st instructions. */
2516
2517 static void
frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2519 {
2520 struct frv_unspec unspec;
2521 rtx x0 = NULL_RTX;
2522 rtx x1 = NULL_RTX;
2523
2524 switch (GET_CODE (x))
2525 {
2526 case SUBREG:
2527 case REG:
2528 x0 = x;
2529 break;
2530
2531 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2532 x0 = XEXP (x, 0);
2533 x1 = XEXP (XEXP (x, 1), 1);
2534 break;
2535
2536 case CONST_INT:
2537 x1 = x;
2538 break;
2539
2540 case PLUS:
2541 x0 = XEXP (x, 0);
2542 x1 = XEXP (x, 1);
2543 if (GET_CODE (x0) == CONST_INT)
2544 {
2545 x0 = XEXP (x, 1);
2546 x1 = XEXP (x, 0);
2547 }
2548 break;
2549
2550 default:
2551 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2552 break;
2553
2554 }
2555
2556 if (addr_offset)
2557 {
2558 if (!x1)
2559 x1 = const0_rtx;
2560 else if (GET_CODE (x1) != CONST_INT)
2561 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2562 }
2563
2564 fputs ("@(", stream);
2565 if (!x0)
2566 fputs (reg_names[GPR_R0], stream);
2567 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2568 frv_print_operand_memory_reference_reg (stream, x0);
2569 else
2570 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2571
2572 fputs (",", stream);
2573 if (!x1)
2574 fputs (reg_names [GPR_R0], stream);
2575
2576 else
2577 {
2578 switch (GET_CODE (x1))
2579 {
2580 case SUBREG:
2581 case REG:
2582 frv_print_operand_memory_reference_reg (stream, x1);
2583 break;
2584
2585 case CONST_INT:
2586 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2587 break;
2588
2589 case CONST:
2590 if (!frv_const_unspec_p (x1, &unspec))
2591 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2592 frv_output_const_unspec (stream, &unspec);
2593 break;
2594
2595 default:
2596 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2597 }
2598 }
2599
2600 fputs (")", stream);
2601 }
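
/* Illustrative outputs, using arbitrary register numbers:
(reg gr10) -> @(gr10,gr0)
(plus (reg gr10) (const_int 8)) -> @(gr10,8)
(plus (reg gr10) (reg gr11)) -> @(gr10,gr11)
(const_int 12) -> @(gr0,12)
With a nonzero ADDR_OFFSET the constant form prints the adjusted value,
which is how the 'N' modifier reaches the second word of a double
access. */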
2602
2603
2604 /* Return 2 for likely branches and 0 for non-likely branches */
2605
2606 #define FRV_JUMP_LIKELY 2
2607 #define FRV_JUMP_NOT_LIKELY 0
2608
2609 static int
frv_print_operand_jump_hint (rtx_insn *insn)
2611 {
2612 rtx note;
2613 rtx labelref;
2614 int ret;
2615 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2616
2617 gcc_assert (JUMP_P (insn));
2618
2619 /* Assume any non-conditional jump is likely. */
2620 if (! any_condjump_p (insn))
2621 ret = FRV_JUMP_LIKELY;
2622
2623 else
2624 {
2625 labelref = condjump_label (insn);
2626 if (labelref)
2627 {
2628 rtx label = XEXP (labelref, 0);
2629 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2630 ? BACKWARD
2631 : FORWARD);
2632 }
2633
2634 note = find_reg_note (insn, REG_BR_PROB, 0);
2635 if (!note)
2636 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2637
2638 else
2639 {
2640 ret = ((profile_probability::from_reg_br_prob_note (XINT (note, 0))
2641 >= profile_probability::even ())
2642 ? FRV_JUMP_LIKELY
2643 : FRV_JUMP_NOT_LIKELY);
2644 }
2645 }
2646
2647 #if 0
2648 if (TARGET_DEBUG)
2649 {
2650 char *direction;
2651
2652 switch (jump_type)
2653 {
2654 default:
2655 case UNKNOWN: direction = "unknown jump direction"; break;
2656 case BACKWARD: direction = "jump backward"; break;
2657 case FORWARD: direction = "jump forward"; break;
2658 }
2659
2660 fprintf (stderr,
2661 "%s: uid %ld, %s, probability = %d, max prob. = %d, hint = %d\n",
2662 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2663 (long)INSN_UID (insn), direction, prob,
2664 REG_BR_PROB_BASE, ret);
2665 }
2666 #endif
2667
2668 return ret;
2669 }
2670
2671
2672 /* Return the comparison operator to use for CODE given that the ICC
2673 register is OP0. */
2674
2675 static const char *
comparison_string (enum rtx_code code, rtx op0)
2677 {
2678 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2679 switch (code)
2680 {
2681 default: output_operand_lossage ("bad condition code"); return "";
2682 case EQ: return "eq";
2683 case NE: return "ne";
2684 case LT: return is_nz_p ? "n" : "lt";
2685 case LE: return "le";
2686 case GT: return "gt";
2687 case GE: return is_nz_p ? "p" : "ge";
2688 case LTU: return is_nz_p ? "no" : "c";
2689 case LEU: return is_nz_p ? "eq" : "ls";
2690 case GTU: return is_nz_p ? "ne" : "hi";
2691 case GEU: return is_nz_p ? "ra" : "nc";
2692 }
2693 }
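
/* For instance, a GE test against an ordinary ICC result prints as "ge",
while the same test against a CC_NZmode result prints as "p", since a
sign/zero-only comparison can only look at the negative and zero flags. */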
2694
2695 /* Print an operand to an assembler instruction.
2696
2697 `%' followed by a letter and a digit says to output an operand in an
2698 alternate fashion. Four letters have standard, built-in meanings
2699 described below. The hook `TARGET_PRINT_OPERAND' can define
2700 additional letters with nonstandard meanings.
2701
2702 `%cDIGIT' can be used to substitute an operand that is a constant value
2703 without the syntax that normally indicates an immediate operand.
2704
2705 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2706 before printing.
2707
2708 `%aDIGIT' can be used to substitute an operand as if it were a memory
2709 reference, with the actual operand treated as the address. This may be
2710 useful when outputting a "load address" instruction, because often the
2711 assembler syntax for such an instruction requires you to write the operand
2712 as if it were a memory reference.
2713
2714 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2715
2716 `%=' outputs a number which is unique to each instruction in the entire
2717 compilation. This is useful for making local labels to be referred to more
2718 than once in a single template that generates multiple assembler
2719 instructions.
2720
2721 `%' followed by a punctuation character specifies a substitution that
2722 does not use an operand. Only one case is standard: `%%' outputs a
2723 `%' into the assembler code. Other nonstandard cases can be defined
2724 in the `TARGET_PRINT_OPERAND' hook. You must also define which
2725 punctuation characters are valid with the
2726 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */
2727
2728 static void
frv_print_operand (FILE * file, rtx x, int code)
2730 {
2731 struct frv_unspec unspec;
2732 HOST_WIDE_INT value;
2733 int offset;
2734
2735 if (code != 0 && !ISALPHA (code))
2736 value = 0;
2737
2738 else if (GET_CODE (x) == CONST_INT)
2739 value = INTVAL (x);
2740
2741 else if (GET_CODE (x) == CONST_DOUBLE)
2742 {
2743 if (GET_MODE (x) == SFmode)
2744 {
2745 long l;
2746
2747 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2748 value = l;
2749 }
2750
2751 else if (GET_MODE (x) == VOIDmode)
2752 value = CONST_DOUBLE_LOW (x);
2753
2754 else
2755 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2756 }
2757
2758 else
2759 value = 0;
2760
2761 switch (code)
2762 {
2763
2764 case '.':
2765 /* Output r0. */
2766 fputs (reg_names[GPR_R0], file);
2767 break;
2768
2769 case '#':
2770 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2771 break;
2772
2773 case '@':
2774 /* Output small data area base register (gr16). */
2775 fputs (reg_names[SDA_BASE_REG], file);
2776 break;
2777
2778 case '~':
2779 /* Output pic register (gr17). */
2780 fputs (reg_names[PIC_REGNO], file);
2781 break;
2782
2783 case '*':
2784 /* Output the temporary integer CCR register. */
2785 fputs (reg_names[ICR_TEMP], file);
2786 break;
2787
2788 case '&':
2789 /* Output the temporary integer CC register. */
2790 fputs (reg_names[ICC_TEMP], file);
2791 break;
2792
2793 /* case 'a': print an address. */
2794
2795 case 'C':
2796 /* Print appropriate test for integer branch false operation. */
2797 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2798 XEXP (x, 0)), file);
2799 break;
2800
2801 case 'c':
2802 /* Print appropriate test for integer branch true operation. */
2803 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2804 break;
2805
2806 case 'e':
2807 /* Print 1 for a NE and 0 for an EQ to give the final argument
2808 for a conditional instruction. */
2809 if (GET_CODE (x) == NE)
2810 fputs ("1", file);
2811
2812 else if (GET_CODE (x) == EQ)
2813 fputs ("0", file);
2814
2815 else
2816 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2817 break;
2818
2819 case 'F':
2820 /* Print appropriate test for floating point branch false operation. */
2821 switch (GET_CODE (x))
2822 {
2823 default:
2824 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2825
2826 case EQ: fputs ("ne", file); break;
2827 case NE: fputs ("eq", file); break;
2828 case LT: fputs ("uge", file); break;
2829 case LE: fputs ("ug", file); break;
2830 case GT: fputs ("ule", file); break;
2831 case GE: fputs ("ul", file); break;
2832 }
2833 break;
2834
2835 case 'f':
2836 /* Print appropriate test for floating point branch true operation. */
2837 switch (GET_CODE (x))
2838 {
2839 default:
2840 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2841
2842 case EQ: fputs ("eq", file); break;
2843 case NE: fputs ("ne", file); break;
2844 case LT: fputs ("lt", file); break;
2845 case LE: fputs ("le", file); break;
2846 case GT: fputs ("gt", file); break;
2847 case GE: fputs ("ge", file); break;
2848 }
2849 break;
2850
2851 case 'g':
2852 /* Print appropriate GOT function. */
2853 if (GET_CODE (x) != CONST_INT)
2854 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2855 fputs (unspec_got_name (INTVAL (x)), file);
2856 break;
2857
2858 case 'I':
2859 /* Print 'i' if the operand is a constant, or is a memory reference that
2860 adds a constant. */
2861 if (GET_CODE (x) == MEM)
2862 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2863 ? XEXP (XEXP (x, 0), 1)
2864 : XEXP (x, 0));
2865 else if (GET_CODE (x) == PLUS)
2866 x = XEXP (x, 1);
2867
2868 switch (GET_CODE (x))
2869 {
2870 default:
2871 break;
2872
2873 case CONST_INT:
2874 case SYMBOL_REF:
2875 case CONST:
2876 fputs ("i", file);
2877 break;
2878 }
2879 break;
2880
2881 case 'i':
2882 /* For jump instructions, print 'i' if the operand is a constant or
2883 is an expression that adds a constant. */
2884 if (GET_CODE (x) == CONST_INT)
2885 fputs ("i", file);
2886
2887 else
2888 {
2889 if (GET_CODE (x) == CONST_INT
2890 || (GET_CODE (x) == PLUS
2891 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2892 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2893 fputs ("i", file);
2894 }
2895 break;
2896
2897 case 'L':
2898 /* Print the lower register of a double word register pair */
2899 if (GET_CODE (x) == REG)
2900 fputs (reg_names[ REGNO (x)+1 ], file);
2901 else
2902 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2903 break;
2904
2905 /* case 'l': print a LABEL_REF. */
2906
2907 case 'M':
2908 case 'N':
2909 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
2910 for the second word of double memory operations. */
2911 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
2912 switch (GET_CODE (x))
2913 {
2914 default:
2915 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
2916
2917 case MEM:
2918 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
2919 break;
2920
2921 case REG:
2922 case SUBREG:
2923 case CONST_INT:
2924 case PLUS:
2925 case SYMBOL_REF:
2926 frv_print_operand_memory_reference (file, x, offset);
2927 break;
2928 }
2929 break;
2930
2931 case 'O':
2932 /* Print the opcode of a command. */
2933 switch (GET_CODE (x))
2934 {
2935 default:
2936 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
2937
2938 case PLUS: fputs ("add", file); break;
2939 case MINUS: fputs ("sub", file); break;
2940 case AND: fputs ("and", file); break;
2941 case IOR: fputs ("or", file); break;
2942 case XOR: fputs ("xor", file); break;
2943 case ASHIFT: fputs ("sll", file); break;
2944 case ASHIFTRT: fputs ("sra", file); break;
2945 case LSHIFTRT: fputs ("srl", file); break;
2946 }
2947 break;
2948
2949 /* case 'n': negate and print a constant int. */
2950
2951 case 'P':
2952 /* Print PIC label using operand as the number. */
2953 if (GET_CODE (x) != CONST_INT)
2954 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
2955
2956 fprintf (file, ".LCF%ld", (long)INTVAL (x));
2957 break;
2958
2959 case 'U':
/* Print 'u' if the operand is an update load/store. */
2961 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
2962 fputs ("u", file);
2963 break;
2964
2965 case 'z':
2966 /* If value is 0, print gr0, otherwise it must be a register. */
2967 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
2968 fputs (reg_names[GPR_R0], file);
2969
2970 else if (GET_CODE (x) == REG)
2971 fputs (reg_names [REGNO (x)], file);
2972
2973 else
2974 fatal_insn ("bad insn in frv_print_operand, z case", x);
2975 break;
2976
2977 case 'x':
2978 /* Print constant in hex. */
2979 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2980 {
2981 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
2982 break;
2983 }
2984
2985 /* Fall through. */
2986
2987 case '\0':
2988 if (GET_CODE (x) == REG)
2989 fputs (reg_names [REGNO (x)], file);
2990
2991 else if (GET_CODE (x) == CONST_INT
2992 || GET_CODE (x) == CONST_DOUBLE)
2993 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
2994
2995 else if (frv_const_unspec_p (x, &unspec))
2996 frv_output_const_unspec (file, &unspec);
2997
2998 else if (GET_CODE (x) == MEM)
2999 frv_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
3000
3001 else if (CONSTANT_ADDRESS_P (x))
3002 frv_print_operand_address (file, VOIDmode, x);
3003
3004 else
3005 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3006
3007 break;
3008
3009 default:
3010 fatal_insn ("frv_print_operand: unknown code", x);
3011 break;
3012 }
3013
3014 return;
3015 }
3016
3017 static bool
frv_print_operand_punct_valid_p (unsigned char code)
3019 {
3020 return (code == '.' || code == '#' || code == '@' || code == '~'
3021 || code == '*' || code == '&');
3022 }
3023
3024
3025 /* A C statement (sans semicolon) for initializing the variable CUM for the
3026 state at the beginning of the argument list. The variable has type
3027 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3028 of the function which will receive the args, or 0 if the args are to a
3029 compiler support library function. The value of INDIRECT is nonzero when
3030 processing an indirect call, for example a call through a function pointer.
3031 The value of INDIRECT is zero for a call to an explicitly named function, a
3032 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3033 arguments for the function being compiled.
3034
3035 When processing a call to a compiler support library function, LIBNAME
3036 identifies which one. It is a `symbol_ref' rtx which contains the name of
3037 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3038 being processed. Thus, each time this macro is called, either LIBNAME or
3039 FNTYPE is nonzero, but never both of them at once. */
3040
3041 void
frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3043 tree fntype,
3044 rtx libname,
3045 tree fndecl,
3046 int incoming)
3047 {
3048 *cum = FIRST_ARG_REGNUM;
3049
3050 if (TARGET_DEBUG_ARG)
3051 {
3052 fprintf (stderr, "\ninit_cumulative_args:");
3053 if (!fndecl && fntype)
3054 fputs (" indirect", stderr);
3055
3056 if (incoming)
3057 fputs (" incoming", stderr);
3058
3059 if (fntype)
3060 {
3061 tree ret_type = TREE_TYPE (fntype);
3062 fprintf (stderr, " return=%s,",
3063 get_tree_code_name (TREE_CODE (ret_type)));
3064 }
3065
3066 if (libname && GET_CODE (libname) == SYMBOL_REF)
3067 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3068
3069 if (cfun->returns_struct)
3070 fprintf (stderr, " return-struct");
3071
3072 putc ('\n', stderr);
3073 }
3074 }
3075
3076
3077 /* Return true if we should pass an argument on the stack rather than
3078 in registers. */
3079
3080 static bool
frv_must_pass_in_stack (const function_arg_info &arg)
3082 {
3083 return arg.mode == BLKmode || arg.aggregate_type_p ();
3084 }
3085
3086 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3087 argument with the specified mode and type. If it is not defined,
3088 `PARM_BOUNDARY' is used for all arguments. */
3089
3090 static unsigned int
frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
3092 const_tree type ATTRIBUTE_UNUSED)
3093 {
3094 return BITS_PER_WORD;
3095 }
3096
3097 static rtx
frv_function_arg_1 (cumulative_args_t cum_v, const function_arg_info &arg,
3099 bool incoming ATTRIBUTE_UNUSED)
3100 {
3101 const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3102
3103 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3104 int arg_num = *cum;
3105 rtx ret;
3106 const char *debstr;
3107
3108 /* Return a marker for use in the call instruction. */
3109 if (xmode == VOIDmode)
3110 {
3111 ret = const0_rtx;
3112 debstr = "<0>";
3113 }
3114
3115 else if (arg_num <= LAST_ARG_REGNUM)
3116 {
3117 ret = gen_rtx_REG (xmode, arg_num);
3118 debstr = reg_names[arg_num];
3119 }
3120
3121 else
3122 {
3123 ret = NULL_RTX;
3124 debstr = "memory";
3125 }
3126
3127 if (TARGET_DEBUG_ARG)
3128 fprintf (stderr,
3129 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3130 arg_num, GET_MODE_NAME (arg.mode), arg.named,
3131 GET_MODE_SIZE (arg.mode), debstr);
3132
3133 return ret;
3134 }
3135
3136 static rtx
frv_function_arg (cumulative_args_t cum, const function_arg_info &arg)
3138 {
3139 return frv_function_arg_1 (cum, arg, false);
3140 }
3141
3142 static rtx
frv_function_incoming_arg (cumulative_args_t cum, const function_arg_info &arg)
3144 {
3145 return frv_function_arg_1 (cum, arg, true);
3146 }
3147
3148
3149 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
3150
3151 static void
frv_function_arg_advance (cumulative_args_t cum_v,
3153 const function_arg_info &arg)
3154 {
3155 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3156
3157 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3158 int bytes = GET_MODE_SIZE (xmode);
3159 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3160 int arg_num = *cum;
3161
3162 *cum = arg_num + words;
3163
3164 if (TARGET_DEBUG_ARG)
3165 fprintf (stderr,
3166 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3167 arg_num, GET_MODE_NAME (arg.mode), arg.named,
3168 words * UNITS_PER_WORD);
3169 }
3170
3171
3172 /* Implement TARGET_ARG_PARTIAL_BYTES. */
3173
3174 static int
frv_arg_partial_bytes (cumulative_args_t cum, const function_arg_info &arg)
3176 {
3177 machine_mode xmode = (arg.mode == BLKmode) ? SImode : arg.mode;
3178 int bytes = GET_MODE_SIZE (xmode);
3179 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3180 int arg_num = *get_cumulative_args (cum);
3181 int ret;
3182
3183 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3184 ? LAST_ARG_REGNUM - arg_num + 1
3185 : 0);
3186 ret *= UNITS_PER_WORD;
3187
3188 if (TARGET_DEBUG_ARG && ret)
3189 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3190
3191 return ret;
3192 }
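
/* A worked example, assuming the argument registers are gr8..gr13: if
*CUM already points at the last argument register, an 8-byte (two-word)
argument gets LAST_ARG_REGNUM - arg_num + 1 == 1 word (4 bytes) in that
register and the remaining 4 bytes on the stack, so the function
returns 4. */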
3193
3194
3195 /* Implements TARGET_FUNCTION_VALUE. */
3196
3197 static rtx
frv_function_value (const_tree valtype,
3199 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3200 bool outgoing ATTRIBUTE_UNUSED)
3201 {
3202 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM);
3203 }
3204
3205
3206 /* Implements TARGET_LIBCALL_VALUE. */
3207
3208 static rtx
frv_libcall_value (machine_mode mode,
3210 const_rtx fun ATTRIBUTE_UNUSED)
3211 {
3212 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3213 }
3214
3215
3216 /* Implements FUNCTION_VALUE_REGNO_P. */
3217
3218 bool
frv_function_value_regno_p (const unsigned int regno)
3220 {
3221 return (regno == RETURN_VALUE_REGNUM);
3222 }
3223
3224 /* Return true if a register is ok to use as a base or index register. */
3225
3226 static FRV_INLINE int
frv_regno_ok_for_base_p (int regno, int strict_p)
3228 {
3229 if (GPR_P (regno))
3230 return TRUE;
3231
3232 if (strict_p)
3233 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3234
3235 if (regno == ARG_POINTER_REGNUM)
3236 return TRUE;
3237
3238 return (regno >= FIRST_PSEUDO_REGISTER);
3239 }
3240
3241
3242 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3243 RTX) is a legitimate memory address on the target machine for a memory
3244 operand of mode MODE.
3245
3246 It usually pays to define several simpler macros to serve as subroutines for
3247 this one. Otherwise it may be too complicated to understand.
3248
3249 This macro must exist in two variants: a strict variant and a non-strict
3250 one. The strict variant is used in the reload pass. It must be defined so
3251 that any pseudo-register that has not been allocated a hard register is
3252 considered a memory reference. In contexts where some kind of register is
3253 required, a pseudo-register with no hard register must be rejected.
3254
3255 The non-strict variant is used in other passes. It must be defined to
3256 accept all pseudo-registers in every context where some kind of register is
3257 required.
3258
3259 Compiler source files that want to use the strict variant of this macro
3260 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3261 conditional to define the strict variant in that case and the non-strict
3262 variant otherwise.
3263
3264 Normally, constant addresses which are the sum of a `symbol_ref' and an
3265 integer are stored inside a `const' RTX to mark them as constant.
3266 Therefore, there is no need to recognize such sums specifically as
3267 legitimate addresses. Normally you would simply recognize any `const' as
3268 legitimate.
3269
3270 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle
3271 constant sums that are not marked with `const'. It assumes that a
3272 naked `plus' indicates indexing. If so, then you *must* reject such
3273 naked constant sums as illegitimate addresses, so that none of them
3274 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
3275
3276 int
frv_legitimate_address_p_1 (machine_mode mode,
3278 rtx x,
3279 int strict_p,
3280 int condexec_p,
3281 int allow_double_reg_p)
3282 {
3283 rtx x0, x1;
3284 int ret = 0;
3285 HOST_WIDE_INT value;
3286 unsigned regno0;
3287
3288 if (FRV_SYMBOL_REF_TLS_P (x))
3289 return 0;
3290
3291 switch (GET_CODE (x))
3292 {
3293 default:
3294 break;
3295
3296 case SUBREG:
3297 x = SUBREG_REG (x);
3298 if (GET_CODE (x) != REG)
3299 break;
3300
3301 /* Fall through. */
3302
3303 case REG:
3304 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3305 break;
3306
3307 case PRE_MODIFY:
3308 x0 = XEXP (x, 0);
3309 x1 = XEXP (x, 1);
3310 if (GET_CODE (x0) != REG
3311 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3312 || GET_CODE (x1) != PLUS
3313 || ! rtx_equal_p (x0, XEXP (x1, 0))
3314 || GET_CODE (XEXP (x1, 1)) != REG
3315 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3316 break;
3317
3318 ret = 1;
3319 break;
3320
3321 case CONST_INT:
3322 /* 12-bit immediate */
3323 if (condexec_p)
3324 ret = FALSE;
3325 else
3326 {
3327 ret = IN_RANGE (INTVAL (x), -2048, 2047);
3328
3329 /* If we can't use load/store double operations, make sure we can
3330 address the second word. */
3331 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3332 ret = IN_RANGE (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3333 -2048, 2047);
3334 }
3335 break;
3336
3337 case PLUS:
3338 x0 = XEXP (x, 0);
3339 x1 = XEXP (x, 1);
3340
3341 if (GET_CODE (x0) == SUBREG)
3342 x0 = SUBREG_REG (x0);
3343
3344 if (GET_CODE (x0) != REG)
3345 break;
3346
3347 regno0 = REGNO (x0);
3348 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3349 break;
3350
3351 switch (GET_CODE (x1))
3352 {
3353 default:
3354 break;
3355
3356 case SUBREG:
3357 x1 = SUBREG_REG (x1);
3358 if (GET_CODE (x1) != REG)
3359 break;
3360
3361 /* Fall through. */
3362
3363 case REG:
3364 /* Do not allow reg+reg addressing for modes > 1 word if we
3365 can't depend on having move double instructions. */
3366 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3367 ret = FALSE;
3368 else
3369 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3370 break;
3371
3372 case CONST_INT:
3373 /* 12-bit immediate */
3374 if (condexec_p)
3375 ret = FALSE;
3376 else
3377 {
3378 value = INTVAL (x1);
3379 ret = IN_RANGE (value, -2048, 2047);
3380
3381 /* If we can't use load/store double operations, make sure we can
3382 address the second word. */
3383 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3384 ret = IN_RANGE (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3385 }
3386 break;
3387
3388 case CONST:
3389 if (!condexec_p && got12_operand (x1, VOIDmode))
3390 ret = TRUE;
3391 break;
3392
3393 }
3394 break;
3395 }
3396
3397 if (TARGET_DEBUG_ADDR)
3398 {
3399 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n",
3400 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3401 (condexec_p) ? ", inside conditional code" : "");
3402 debug_rtx (x);
3403 }
3404
3405 return ret;
3406 }
3407
3408 bool
frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p)
3410 {
3411 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
3412 }
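
/* Examples of how the checks above classify addresses (arbitrary
register numbers):
(reg gr6) -> valid
(plus (reg gr6) (const_int 100)) -> valid for SImode, and for DImode too
since 100 + 7 still fits in 12 bits
(plus (reg gr6) (const_int 4000)) -> rejected, offset out of range
(plus (reg gr6) (reg gr7)) -> valid for SImode, rejected for DImode
unless ALLOW_DOUBLE_REG_P
(pre_modify (reg gr6) (plus (reg gr6) (reg gr7))) -> valid
Constant offsets are also rejected when CONDEXEC_P, leaving only the
register-based forms. */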
3413
3414 /* Given an ADDR, generate code to inline the PLT. */
3415 static rtx
gen_inlined_tls_plt (rtx addr)
3417 {
3418 rtx retval, dest;
3419 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3420
3421
3422 dest = gen_reg_rtx (DImode);
3423
3424 if (flag_pic == 1)
3425 {
3426 /*
3427 -fpic version:
3428
3429 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3430 calll #gettlsoff(ADDR)@(gr8, gr0)
3431 */
3432 emit_insn (gen_tls_lddi (dest, addr, picreg));
3433 }
3434 else
3435 {
3436 /*
3437 -fPIC version:
3438
3439 sethi.p #gottlsdeschi(ADDR), gr8
3440 setlo #gottlsdesclo(ADDR), gr8
3441 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3442 calll #gettlsoff(ADDR)@(gr8, gr0)
3443 */
3444 rtx reguse = gen_reg_rtx (Pmode);
3445 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3446 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3447 }
3448
3449 retval = gen_reg_rtx (Pmode);
3450 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3451 return retval;
3452 }
3453
3454 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3455 the destination address. */
3456 static rtx
3457 gen_tlsmoff (rtx addr, rtx reg)
3458 {
3459 rtx dest = gen_reg_rtx (Pmode);
3460
3461 if (TARGET_BIG_TLS)
3462 {
3463 /* sethi.p #tlsmoffhi(x), grA
3464 setlo #tlsmofflo(x), grA
3465 */
3466 dest = gen_reg_rtx (Pmode);
3467 emit_insn (gen_tlsoff_hilo (dest, addr,
3468 GEN_INT (R_FRV_TLSMOFFHI)));
3469 dest = gen_rtx_PLUS (Pmode, dest, reg);
3470 }
3471 else
3472 {
3473 /* addi grB, #tlsmoff12(x), grC
3474 -or-
3475 ld/st @(grB, #tlsmoff12(x)), grC
3476 */
3477 dest = gen_reg_rtx (Pmode);
3478 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3479 GEN_INT (R_FRV_TLSMOFF12)));
3480 }
3481 return dest;
3482 }
3483
3484 /* Generate code for a TLS address. */
3485 static rtx
3486 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3487 {
3488 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3489 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3490
3491 switch (model)
3492 {
3493 case TLS_MODEL_INITIAL_EXEC:
3494 if (flag_pic == 1)
3495 {
3496 /* -fpic version.
3497 ldi @(gr15, #gottlsoff12(x)), gr5
3498 */
3499 dest = gen_reg_rtx (Pmode);
3500 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3501 dest = gen_rtx_PLUS (Pmode, tp, dest);
3502 }
3503 else
3504 {
3505 /* -fPIC or anything else.
3506
3507 sethi.p #gottlsoffhi(x), gr14
3508 setlo #gottlsofflo(x), gr14
3509 ld #tlsoff(x)@(gr15, gr14), gr9
3510 */
3511 rtx tmp = gen_reg_rtx (Pmode);
3512 dest = gen_reg_rtx (Pmode);
3513 emit_insn (gen_tlsoff_hilo (tmp, addr,
3514 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3515
3516 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3517 dest = gen_rtx_PLUS (Pmode, tp, dest);
3518 }
3519 break;
3520 case TLS_MODEL_LOCAL_DYNAMIC:
3521 {
3522 rtx reg, retval;
3523
3524 if (TARGET_INLINE_PLT)
3525 retval = gen_inlined_tls_plt (GEN_INT (0));
3526 else
3527 {
3528 /* call #gettlsoff(0) */
3529 retval = gen_reg_rtx (Pmode);
3530 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3531 }
3532
3533 reg = gen_reg_rtx (Pmode);
3534 emit_insn (gen_rtx_SET (reg, gen_rtx_PLUS (Pmode, retval, tp)));
3535
3536 dest = gen_tlsmoff (addr, reg);
3537
3538 /*
3539 dest = gen_reg_rtx (Pmode);
3540 emit_insn (gen_tlsoff_hilo (dest, addr,
3541 GEN_INT (R_FRV_TLSMOFFHI)));
3542 dest = gen_rtx_PLUS (Pmode, dest, reg);
3543 */
3544 break;
3545 }
3546 case TLS_MODEL_LOCAL_EXEC:
3547 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3548 break;
3549 case TLS_MODEL_GLOBAL_DYNAMIC:
3550 {
3551 rtx retval;
3552
3553 if (TARGET_INLINE_PLT)
3554 retval = gen_inlined_tls_plt (addr);
3555 else
3556 {
3557 /* call #gettlsoff(x) */
3558 retval = gen_reg_rtx (Pmode);
3559 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3560 }
3561 dest = gen_rtx_PLUS (Pmode, retval, tp);
3562 break;
3563 }
3564 default:
3565 gcc_unreachable ();
3566 }
3567
3568 return dest;
3569 }
3570
3571 rtx
3572 frv_legitimize_address (rtx x,
3573 rtx oldx ATTRIBUTE_UNUSED,
3574 machine_mode mode ATTRIBUTE_UNUSED)
3575 {
3576 if (GET_CODE (x) == SYMBOL_REF)
3577 {
3578 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3579 if (model != 0)
3580 return frv_legitimize_tls_address (x, model);
3581 }
3582
3583 return x;
3584 }
3585
3586 /* Test whether a local function descriptor is canonical, i.e.,
3587 whether we can use FUNCDESC_GOTOFF to compute the address of the
3588 function. */
3589
3590 static bool
3591 frv_local_funcdesc_p (rtx fnx)
3592 {
3593 tree fn;
3594 enum symbol_visibility vis;
3595 bool ret;
3596
3597 if (! SYMBOL_REF_LOCAL_P (fnx))
3598 return FALSE;
3599
3600 fn = SYMBOL_REF_DECL (fnx);
3601
3602 if (! fn)
3603 return FALSE;
3604
3605 vis = DECL_VISIBILITY (fn);
3606
3607 if (vis == VISIBILITY_PROTECTED)
3608 /* Private function descriptors for protected functions are not
3609 canonical. Temporarily change the visibility to global. */
3610 vis = VISIBILITY_DEFAULT;
3611 else if (flag_shlib)
3612 /* If we're already compiling for a shared library (that, unlike
3613 executables, can't assume that the existence of a definition
3614 implies local binding), we can skip the re-testing. */
3615 return TRUE;
3616
3617 ret = default_binds_local_p_1 (fn, flag_pic);
3618
3619 DECL_VISIBILITY (fn) = vis;
3620
3621 return ret;
3622 }
3623
3624 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3625 register. */
3626
3627 rtx
3628 frv_gen_GPsym2reg (rtx dest, rtx src)
3629 {
3630 tree gp = get_identifier ("_gp");
3631 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3632
3633 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3634 }
3635
3636 static const char *
3637 unspec_got_name (int i)
3638 {
3639 switch (i)
3640 {
3641 case R_FRV_GOT12: return "got12";
3642 case R_FRV_GOTHI: return "gothi";
3643 case R_FRV_GOTLO: return "gotlo";
3644 case R_FRV_FUNCDESC: return "funcdesc";
3645 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3646 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3647 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3648 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3649 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3650 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3651 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3652 case R_FRV_GOTOFF12: return "gotoff12";
3653 case R_FRV_GOTOFFHI: return "gotoffhi";
3654 case R_FRV_GOTOFFLO: return "gotofflo";
3655 case R_FRV_GPREL12: return "gprel12";
3656 case R_FRV_GPRELHI: return "gprelhi";
3657 case R_FRV_GPRELLO: return "gprello";
3658 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3659 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3660 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3661 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3662 case R_FRV_TLSMOFF12: return "tlsmoff12";
3663 case R_FRV_TLSDESCHI: return "tlsdeschi";
3664 case R_FRV_TLSDESCLO: return "tlsdesclo";
3665 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3666 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3667 default: gcc_unreachable ();
3668 }
3669 }
3670
3671 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3672 is added inside the relocation operator. */
3673
3674 static void
3675 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3676 {
3677 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3678 output_addr_const (stream, plus_constant (Pmode, unspec->symbol,
3679 unspec->offset));
3680 fputs (")", stream);
3681 }
3682
3683 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3684 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3685 otherwise return ORIG_X. */
3686
3687 rtx
3688 frv_find_base_term (rtx x)
3689 {
3690 struct frv_unspec unspec;
3691
3692 if (frv_const_unspec_p (x, &unspec)
3693 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3694 return plus_constant (Pmode, unspec.symbol, unspec.offset);
3695
3696 return x;
3697 }
3698
3699 /* Return 1 if operand is a valid FRV address. CONDEXEC_P is true if
3700 the operand is used by a predicated instruction. */
3701
3702 int
3703 frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p)
3704 {
3705 return ((GET_MODE (op) == mode || mode == VOIDmode)
3706 && GET_CODE (op) == MEM
3707 && frv_legitimate_address_p_1 (mode, XEXP (op, 0),
3708 reload_completed, condexec_p, FALSE));
3709 }
3710
3711 void
3712 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3713 {
3714 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3715 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3716 rtx c, rvrtx = 0;
3717 rtx addr;
3718
3719 if (ret_value)
3720 {
3721 rvrtx = operands[0];
3722 operands ++;
3723 }
3724
3725 addr = XEXP (operands[0], 0);
3726
3727 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3728 any calls that would involve a PLT, but can't tell, since we
3729 don't know whether an extern function is going to be provided by
3730 a separate translation unit or imported from a separate module.
3731 When compiling for shared libraries, if the function has default
3732 visibility, we assume it's overridable, so we inline the PLT, but
3733 for executables, we don't really have a way to make a good
3734 decision: a function is as likely to be imported from a shared
3735 library as it is to be defined in the executable itself. We
3736 assume executables will get global functions defined locally,
3737 whereas shared libraries will have them potentially overridden,
3738 so we only inline PLTs when compiling for shared libraries.
3739
3740 In order to mark a function as local to a shared library, any
3741 non-default visibility attribute suffices. Unfortunately,
3742 there's no simple way to tag a function declaration as ``in a
3743 different module'', which we could then use to trigger PLT
3744 inlining on executables. There's -minline-plt, but it affects
3745 all external functions, so one would have to also mark function
3746 declarations available in the same module with non-default
3747 visibility, which is advantageous in itself. */
3748 if (GET_CODE (addr) == SYMBOL_REF
3749 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3750 || sibcall))
3751 {
3752 rtx x, dest;
3753 dest = gen_reg_rtx (SImode);
3754 if (flag_pic != 1)
3755 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3756 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3757 else
3758 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3759 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3760 emit_insn (x);
3761 crtl->uses_pic_offset_table = TRUE;
3762 addr = dest;
3763 }
3764 else if (GET_CODE (addr) == SYMBOL_REF)
3765 {
3766 /* These are always either local, or handled through a local
3767 PLT. */
3768 if (ret_value)
3769 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3770 operands[2], picreg, lr);
3771 else
3772 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3773 emit_call_insn (c);
3774 return;
3775 }
3776 else if (! ldd_address_operand (addr, Pmode))
3777 addr = force_reg (Pmode, addr);
3778
3779 picreg = gen_reg_rtx (DImode);
3780 emit_insn (gen_movdi_ldd (picreg, addr));
3781
3782 if (sibcall && ret_value)
3783 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3784 else if (sibcall)
3785 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3786 else if (ret_value)
3787 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3788 else
3789 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3790 emit_call_insn (c);
3791 }
3792
3793 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3794 process these separately from any offsets, such that we add any
3795 offsets to the function descriptor (the actual pointer), not to the
3796 function address. */
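/* Illustrative example (hypothetical operand, derived from the walk
   below rather than from real dumps): for an rtx of the form
   (const (plus (symbol_ref "f") (const_int 8))) the recursion reaches
   the SYMBOL_REF and returns SYMBOL_REF_FUNCTION_P, i.e. true when "f"
   is a function, so callers can apply the offset to the function
   descriptor rather than to the code address.  */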
3797
3798 static bool
3799 frv_function_symbol_referenced_p (rtx x)
3800 {
3801 const char *format;
3802 int length;
3803 int j;
3804
3805 if (GET_CODE (x) == SYMBOL_REF)
3806 return SYMBOL_REF_FUNCTION_P (x);
3807
3808 length = GET_RTX_LENGTH (GET_CODE (x));
3809 format = GET_RTX_FORMAT (GET_CODE (x));
3810
3811 for (j = 0; j < length; ++j)
3812 {
3813 switch (format[j])
3814 {
3815 case 'e':
3816 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3817 return TRUE;
3818 break;
3819
3820 case 'V':
3821 case 'E':
3822 if (XVEC (x, j) != 0)
3823 {
3824 int k;
3825 for (k = 0; k < XVECLEN (x, j); ++k)
3826 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3827 return TRUE;
3828 }
3829 break;
3830
3831 default:
3832 /* Nothing to do. */
3833 break;
3834 }
3835 }
3836
3837 return FALSE;
3838 }
3839
3840 /* Return true if the memory operand is one that can be conditionally
3841 executed. */
3842
3843 int
3844 condexec_memory_operand (rtx op, machine_mode mode)
3845 {
3846 machine_mode op_mode = GET_MODE (op);
3847 rtx addr;
3848
3849 if (mode != VOIDmode && op_mode != mode)
3850 return FALSE;
3851
3852 switch (op_mode)
3853 {
3854 default:
3855 return FALSE;
3856
3857 case E_QImode:
3858 case E_HImode:
3859 case E_SImode:
3860 case E_SFmode:
3861 break;
3862 }
3863
3864 if (GET_CODE (op) != MEM)
3865 return FALSE;
3866
3867 addr = XEXP (op, 0);
3868 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE);
3869 }
3870
3871 /* Return true if the bare return instruction can be used outside of the
3872 epilog code. For frv, we only do it if there was no stack allocation. */
3873
3874 int
3875 direct_return_p (void)
3876 {
3877 frv_stack_t *info;
3878
3879 if (!reload_completed)
3880 return FALSE;
3881
3882 info = frv_stack_info ();
3883 return (info->total_size == 0);
3884 }
3885
3886
3887 void
3888 frv_emit_move (machine_mode mode, rtx dest, rtx src)
3889 {
3890 if (GET_CODE (src) == SYMBOL_REF)
3891 {
3892 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3893 if (model != 0)
3894 src = frv_legitimize_tls_address (src, model);
3895 }
3896
3897 switch (mode)
3898 {
3899 case E_SImode:
3900 if (frv_emit_movsi (dest, src))
3901 return;
3902 break;
3903
3904 case E_QImode:
3905 case E_HImode:
3906 case E_DImode:
3907 case E_SFmode:
3908 case E_DFmode:
3909 if (!reload_in_progress
3910 && !reload_completed
3911 && !register_operand (dest, mode)
3912 && !reg_or_0_operand (src, mode))
3913 src = copy_to_mode_reg (mode, src);
3914 break;
3915
3916 default:
3917 gcc_unreachable ();
3918 }
3919
3920 emit_insn (gen_rtx_SET (dest, src));
3921 }
3922
3923 /* Emit code to handle a MOVSI, adding in the small data register or pic
3924 register if needed to load up addresses. Return TRUE if the appropriate
3925 instructions are emitted. */
3926
3927 int
3928 frv_emit_movsi (rtx dest, rtx src)
3929 {
3930 int base_regno = -1;
3931 int unspec = 0;
3932 rtx sym = src;
3933 struct frv_unspec old_unspec;
3934
3935 if (!reload_in_progress
3936 && !reload_completed
3937 && !register_operand (dest, SImode)
3938 && (!reg_or_0_operand (src, SImode)
3939 /* Virtual registers will almost always be replaced by an
3940 add instruction, so expose this to CSE by copying to
3941 an intermediate register. */
3942 || (GET_CODE (src) == REG
3943 && IN_RANGE (REGNO (src),
3944 FIRST_VIRTUAL_REGISTER,
3945 LAST_VIRTUAL_POINTER_REGISTER))))
3946 {
3947 emit_insn (gen_rtx_SET (dest, copy_to_mode_reg (SImode, src)));
3948 return TRUE;
3949 }
3950
3951 /* Explicitly add in the PIC or small data register if needed. */
3952 switch (GET_CODE (src))
3953 {
3954 default:
3955 break;
3956
3957 case LABEL_REF:
3958 handle_label:
3959 if (TARGET_FDPIC)
3960 {
3961 /* Using GPREL12, we use a single GOT entry for all symbols
3962 in read-only sections, but trade sequences such as:
3963
3964 sethi #gothi(label), gr#
3965 setlo #gotlo(label), gr#
3966 ld @(gr15,gr#), gr#
3967
3968 for
3969
3970 ld @(gr15,#got12(_gp)), gr#
3971 sethi #gprelhi(label), gr##
3972 setlo #gprello(label), gr##
3973 add gr#, gr##, gr##
3974
3975 We may often be able to share gr# for multiple
3976 computations of GPREL addresses, and we may often fold
3977 the final add into the pair of registers of a load or
3978 store instruction, so it's often profitable. Even when
3979 optimizing for size, we're trading a GOT entry for an
3980 additional instruction, which trades GOT space
3981 (read-write) for code size (read-only, shareable), as
3982 long as the symbol is not used in more than two different
3983 locations.
3984
3985 With -fpie/-fpic, we'd be trading a single load for a
3986 sequence of 4 instructions, because the offset of the
3987 label can't be assumed to be addressable with 12 bits, so
3988 we don't do this. */
3989 if (TARGET_GPREL_RO)
3990 unspec = R_FRV_GPREL12;
3991 else
3992 unspec = R_FRV_GOT12;
3993 }
3994 else if (flag_pic)
3995 base_regno = PIC_REGNO;
3996
3997 break;
3998
3999 case CONST:
4000 if (frv_const_unspec_p (src, &old_unspec))
4001 break;
4002
4003 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4004 {
4005 handle_whatever:
4006 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4007 emit_move_insn (dest, src);
4008 return TRUE;
4009 }
4010 else
4011 {
4012 sym = XEXP (sym, 0);
4013 if (GET_CODE (sym) == PLUS
4014 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4015 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4016 sym = XEXP (sym, 0);
4017 if (GET_CODE (sym) == SYMBOL_REF)
4018 goto handle_sym;
4019 else if (GET_CODE (sym) == LABEL_REF)
4020 goto handle_label;
4021 else
4022 goto handle_whatever;
4023 }
4024 break;
4025
4026 case SYMBOL_REF:
4027 handle_sym:
4028 if (TARGET_FDPIC)
4029 {
4030 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4031
4032 if (model != 0)
4033 {
4034 src = frv_legitimize_tls_address (src, model);
4035 emit_move_insn (dest, src);
4036 return TRUE;
4037 }
4038
4039 if (SYMBOL_REF_FUNCTION_P (sym))
4040 {
4041 if (frv_local_funcdesc_p (sym))
4042 unspec = R_FRV_FUNCDESC_GOTOFF12;
4043 else
4044 unspec = R_FRV_FUNCDESC_GOT12;
4045 }
4046 else
4047 {
4048 if (CONSTANT_POOL_ADDRESS_P (sym))
4049 switch (GET_CODE (get_pool_constant (sym)))
4050 {
4051 case CONST:
4052 case SYMBOL_REF:
4053 case LABEL_REF:
4054 if (flag_pic)
4055 {
4056 unspec = R_FRV_GOTOFF12;
4057 break;
4058 }
4059 /* Fall through. */
4060 default:
4061 if (TARGET_GPREL_RO)
4062 unspec = R_FRV_GPREL12;
4063 else
4064 unspec = R_FRV_GOT12;
4065 break;
4066 }
4067 else if (SYMBOL_REF_LOCAL_P (sym)
4068 && !SYMBOL_REF_EXTERNAL_P (sym)
4069 && SYMBOL_REF_DECL (sym)
4070 && (!DECL_P (SYMBOL_REF_DECL (sym))
4071 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4072 {
4073 tree decl = SYMBOL_REF_DECL (sym);
4074 tree init = TREE_CODE (decl) == VAR_DECL
4075 ? DECL_INITIAL (decl)
4076 : TREE_CODE (decl) == CONSTRUCTOR
4077 ? decl : 0;
4078 int reloc = 0;
4079 bool named_section, readonly;
4080
4081 if (init && init != error_mark_node)
4082 reloc = compute_reloc_for_constant (init);
4083
4084 named_section = TREE_CODE (decl) == VAR_DECL
4085 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4086 readonly = decl_readonly_section (decl, reloc);
4087
4088 if (named_section)
4089 unspec = R_FRV_GOT12;
4090 else if (!readonly)
4091 unspec = R_FRV_GOTOFF12;
4092 else if (readonly && TARGET_GPREL_RO)
4093 unspec = R_FRV_GPREL12;
4094 else
4095 unspec = R_FRV_GOT12;
4096 }
4097 else
4098 unspec = R_FRV_GOT12;
4099 }
4100 }
4101
4102 else if (SYMBOL_REF_SMALL_P (sym))
4103 base_regno = SDA_BASE_REG;
4104
4105 else if (flag_pic)
4106 base_regno = PIC_REGNO;
4107
4108 break;
4109 }
4110
4111 if (base_regno >= 0)
4112 {
4113 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4114 emit_insn (gen_symGOTOFF2reg (dest, src,
4115 gen_rtx_REG (Pmode, base_regno),
4116 GEN_INT (R_FRV_GPREL12)));
4117 else
4118 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4119 gen_rtx_REG (Pmode, base_regno),
4120 GEN_INT (R_FRV_GPREL12)));
4121 if (base_regno == PIC_REGNO)
4122 crtl->uses_pic_offset_table = TRUE;
4123 return TRUE;
4124 }
4125
4126 if (unspec)
4127 {
4128 rtx x;
4129
4130 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4131 new uses of it once reload has begun. */
4132 gcc_assert (!reload_in_progress && !reload_completed);
4133
4134 switch (unspec)
4135 {
4136 case R_FRV_GOTOFF12:
4137 if (!frv_small_data_reloc_p (sym, unspec))
4138 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4139 GEN_INT (unspec));
4140 else
4141 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4142 break;
4143 case R_FRV_GPREL12:
4144 if (!frv_small_data_reloc_p (sym, unspec))
4145 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4146 GEN_INT (unspec));
4147 else
4148 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4149 break;
4150 case R_FRV_FUNCDESC_GOTOFF12:
4151 if (flag_pic != 1)
4152 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4153 GEN_INT (unspec));
4154 else
4155 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4156 break;
4157 default:
4158 if (flag_pic != 1)
4159 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4160 GEN_INT (unspec));
4161 else
4162 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4163 break;
4164 }
4165 emit_insn (x);
4166 crtl->uses_pic_offset_table = TRUE;
4167 return TRUE;
4168 }
4169
4170
4171 return FALSE;
4172 }
4173
4174
4175 /* Return a string to output a single word move. */
4176
4177 const char *
4178 output_move_single (rtx operands[], rtx insn)
4179 {
4180 rtx dest = operands[0];
4181 rtx src = operands[1];
4182
4183 if (GET_CODE (dest) == REG)
4184 {
4185 int dest_regno = REGNO (dest);
4186 machine_mode mode = GET_MODE (dest);
4187
4188 if (GPR_P (dest_regno))
4189 {
4190 if (GET_CODE (src) == REG)
4191 {
4192 /* gpr <- some sort of register */
4193 int src_regno = REGNO (src);
4194
4195 if (GPR_P (src_regno))
4196 return "mov %1, %0";
4197
4198 else if (FPR_P (src_regno))
4199 return "movfg %1, %0";
4200
4201 else if (SPR_P (src_regno))
4202 return "movsg %1, %0";
4203 }
4204
4205 else if (GET_CODE (src) == MEM)
4206 {
4207 /* gpr <- memory */
4208 switch (mode)
4209 {
4210 default:
4211 break;
4212
4213 case E_QImode:
4214 return "ldsb%I1%U1 %M1,%0";
4215
4216 case E_HImode:
4217 return "ldsh%I1%U1 %M1,%0";
4218
4219 case E_SImode:
4220 case E_SFmode:
4221 return "ld%I1%U1 %M1, %0";
4222 }
4223 }
4224
4225 else if (GET_CODE (src) == CONST_INT
4226 || GET_CODE (src) == CONST_DOUBLE)
4227 {
4228 /* gpr <- integer/floating constant */
4229 HOST_WIDE_INT value;
4230
4231 if (GET_CODE (src) == CONST_INT)
4232 value = INTVAL (src);
4233
4234 else if (mode == SFmode)
4235 {
4236 long l;
4237
4238 REAL_VALUE_TO_TARGET_SINGLE
4239 (*CONST_DOUBLE_REAL_VALUE (src), l);
4240 value = l;
4241 }
4242
4243 else
4244 value = CONST_DOUBLE_LOW (src);
4245
4246 if (IN_RANGE (value, -32768, 32767))
4247 return "setlos %1, %0";
4248
4249 return "#";
4250 }
4251
4252 else if (GET_CODE (src) == SYMBOL_REF
4253 || GET_CODE (src) == LABEL_REF
4254 || GET_CODE (src) == CONST)
4255 {
4256 return "#";
4257 }
4258 }
4259
4260 else if (FPR_P (dest_regno))
4261 {
4262 if (GET_CODE (src) == REG)
4263 {
4264 /* fpr <- some sort of register */
4265 int src_regno = REGNO (src);
4266
4267 if (GPR_P (src_regno))
4268 return "movgf %1, %0";
4269
4270 else if (FPR_P (src_regno))
4271 {
4272 if (TARGET_HARD_FLOAT)
4273 return "fmovs %1, %0";
4274 else
4275 return "mor %1, %1, %0";
4276 }
4277 }
4278
4279 else if (GET_CODE (src) == MEM)
4280 {
4281 /* fpr <- memory */
4282 switch (mode)
4283 {
4284 default:
4285 break;
4286
4287 case E_QImode:
4288 return "ldbf%I1%U1 %M1,%0";
4289
4290 case E_HImode:
4291 return "ldhf%I1%U1 %M1,%0";
4292
4293 case E_SImode:
4294 case E_SFmode:
4295 return "ldf%I1%U1 %M1, %0";
4296 }
4297 }
4298
4299 else if (ZERO_P (src))
4300 return "movgf %., %0";
4301 }
4302
4303 else if (SPR_P (dest_regno))
4304 {
4305 if (GET_CODE (src) == REG)
4306 {
4307 /* spr <- some sort of register */
4308 int src_regno = REGNO (src);
4309
4310 if (GPR_P (src_regno))
4311 return "movgs %1, %0";
4312 }
4313 else if (ZERO_P (src))
4314 return "movgs %., %0";
4315 }
4316 }
4317
4318 else if (GET_CODE (dest) == MEM)
4319 {
4320 if (GET_CODE (src) == REG)
4321 {
4322 int src_regno = REGNO (src);
4323 machine_mode mode = GET_MODE (dest);
4324
4325 if (GPR_P (src_regno))
4326 {
4327 switch (mode)
4328 {
4329 default:
4330 break;
4331
4332 case E_QImode:
4333 return "stb%I0%U0 %1, %M0";
4334
4335 case E_HImode:
4336 return "sth%I0%U0 %1, %M0";
4337
4338 case E_SImode:
4339 case E_SFmode:
4340 return "st%I0%U0 %1, %M0";
4341 }
4342 }
4343
4344 else if (FPR_P (src_regno))
4345 {
4346 switch (mode)
4347 {
4348 default:
4349 break;
4350
4351 case E_QImode:
4352 return "stbf%I0%U0 %1, %M0";
4353
4354 case E_HImode:
4355 return "sthf%I0%U0 %1, %M0";
4356
4357 case E_SImode:
4358 case E_SFmode:
4359 return "stf%I0%U0 %1, %M0";
4360 }
4361 }
4362 }
4363
4364 else if (ZERO_P (src))
4365 {
4366 switch (GET_MODE (dest))
4367 {
4368 default:
4369 break;
4370
4371 case E_QImode:
4372 return "stb%I0%U0 %., %M0";
4373
4374 case E_HImode:
4375 return "sth%I0%U0 %., %M0";
4376
4377 case E_SImode:
4378 case E_SFmode:
4379 return "st%I0%U0 %., %M0";
4380 }
4381 }
4382 }
4383
4384 fatal_insn ("bad output_move_single operand", insn);
4385 return "";
4386 }
4387
4388
4389 /* Return a string to output a double word move. */
4390
4391 const char *
4392 output_move_double (rtx operands[], rtx insn)
4393 {
4394 rtx dest = operands[0];
4395 rtx src = operands[1];
4396 machine_mode mode = GET_MODE (dest);
4397
4398 if (GET_CODE (dest) == REG)
4399 {
4400 int dest_regno = REGNO (dest);
4401
4402 if (GPR_P (dest_regno))
4403 {
4404 if (GET_CODE (src) == REG)
4405 {
4406 /* gpr <- some sort of register */
4407 int src_regno = REGNO (src);
4408
4409 if (GPR_P (src_regno))
4410 return "#";
4411
4412 else if (FPR_P (src_regno))
4413 {
4414 if (((dest_regno - GPR_FIRST) & 1) == 0
4415 && ((src_regno - FPR_FIRST) & 1) == 0)
4416 return "movfgd %1, %0";
4417
4418 return "#";
4419 }
4420 }
4421
4422 else if (GET_CODE (src) == MEM)
4423 {
4424 /* gpr <- memory */
4425 if (dbl_memory_one_insn_operand (src, mode))
4426 return "ldd%I1%U1 %M1, %0";
4427
4428 return "#";
4429 }
4430
4431 else if (GET_CODE (src) == CONST_INT
4432 || GET_CODE (src) == CONST_DOUBLE)
4433 return "#";
4434 }
4435
4436 else if (FPR_P (dest_regno))
4437 {
4438 if (GET_CODE (src) == REG)
4439 {
4440 /* fpr <- some sort of register */
4441 int src_regno = REGNO (src);
4442
4443 if (GPR_P (src_regno))
4444 {
4445 if (((dest_regno - FPR_FIRST) & 1) == 0
4446 && ((src_regno - GPR_FIRST) & 1) == 0)
4447 return "movgfd %1, %0";
4448
4449 return "#";
4450 }
4451
4452 else if (FPR_P (src_regno))
4453 {
4454 if (TARGET_DOUBLE
4455 && ((dest_regno - FPR_FIRST) & 1) == 0
4456 && ((src_regno - FPR_FIRST) & 1) == 0)
4457 return "fmovd %1, %0";
4458
4459 return "#";
4460 }
4461 }
4462
4463 else if (GET_CODE (src) == MEM)
4464 {
4465 /* fpr <- memory */
4466 if (dbl_memory_one_insn_operand (src, mode))
4467 return "lddf%I1%U1 %M1, %0";
4468
4469 return "#";
4470 }
4471
4472 else if (ZERO_P (src))
4473 return "#";
4474 }
4475 }
4476
4477 else if (GET_CODE (dest) == MEM)
4478 {
4479 if (GET_CODE (src) == REG)
4480 {
4481 int src_regno = REGNO (src);
4482
4483 if (GPR_P (src_regno))
4484 {
4485 if (((src_regno - GPR_FIRST) & 1) == 0
4486 && dbl_memory_one_insn_operand (dest, mode))
4487 return "std%I0%U0 %1, %M0";
4488
4489 return "#";
4490 }
4491
4492 if (FPR_P (src_regno))
4493 {
4494 if (((src_regno - FPR_FIRST) & 1) == 0
4495 && dbl_memory_one_insn_operand (dest, mode))
4496 return "stdf%I0%U0 %1, %M0";
4497
4498 return "#";
4499 }
4500 }
4501
4502 else if (ZERO_P (src))
4503 {
4504 if (dbl_memory_one_insn_operand (dest, mode))
4505 return "std%I0%U0 %., %M0";
4506
4507 return "#";
4508 }
4509 }
4510
4511 fatal_insn ("bad output_move_double operand", insn);
4512 return "";
4513 }
4514
4515
4516 /* Return a string to output a single word conditional move.
4517 Operand0 -- EQ/NE of ccr register and 0
4518 Operand1 -- CCR register
4519 Operand2 -- destination
4520 Operand3 -- source */
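/* For illustration, and assuming the operand layout above: the
   GPR-to-GPR template "cmov %z3, %2, %1, %e0" returned below names the
   source, the destination, the CCR register and the EQ/NE test, in
   that order; the %z and %e prefixes are print modifiers handled
   elsewhere in this file (an assumption about their exact effect, not
   verified here).  */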
4521
4522 const char *
4523 output_condmove_single (rtx operands[], rtx insn)
4524 {
4525 rtx dest = operands[2];
4526 rtx src = operands[3];
4527
4528 if (GET_CODE (dest) == REG)
4529 {
4530 int dest_regno = REGNO (dest);
4531 machine_mode mode = GET_MODE (dest);
4532
4533 if (GPR_P (dest_regno))
4534 {
4535 if (GET_CODE (src) == REG)
4536 {
4537 /* gpr <- some sort of register */
4538 int src_regno = REGNO (src);
4539
4540 if (GPR_P (src_regno))
4541 return "cmov %z3, %2, %1, %e0";
4542
4543 else if (FPR_P (src_regno))
4544 return "cmovfg %3, %2, %1, %e0";
4545 }
4546
4547 else if (GET_CODE (src) == MEM)
4548 {
4549 /* gpr <- memory */
4550 switch (mode)
4551 {
4552 default:
4553 break;
4554
4555 case E_QImode:
4556 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4557
4558 case E_HImode:
4559 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4560
4561 case E_SImode:
4562 case E_SFmode:
4563 return "cld%I3%U3 %M3, %2, %1, %e0";
4564 }
4565 }
4566
4567 else if (ZERO_P (src))
4568 return "cmov %., %2, %1, %e0";
4569 }
4570
4571 else if (FPR_P (dest_regno))
4572 {
4573 if (GET_CODE (src) == REG)
4574 {
4575 /* fpr <- some sort of register */
4576 int src_regno = REGNO (src);
4577
4578 if (GPR_P (src_regno))
4579 return "cmovgf %3, %2, %1, %e0";
4580
4581 else if (FPR_P (src_regno))
4582 {
4583 if (TARGET_HARD_FLOAT)
4584 return "cfmovs %3,%2,%1,%e0";
4585 else
4586 return "cmor %3, %3, %2, %1, %e0";
4587 }
4588 }
4589
4590 else if (GET_CODE (src) == MEM)
4591 {
4592 /* fpr <- memory */
4593 if (mode == SImode || mode == SFmode)
4594 return "cldf%I3%U3 %M3, %2, %1, %e0";
4595 }
4596
4597 else if (ZERO_P (src))
4598 return "cmovgf %., %2, %1, %e0";
4599 }
4600 }
4601
4602 else if (GET_CODE (dest) == MEM)
4603 {
4604 if (GET_CODE (src) == REG)
4605 {
4606 int src_regno = REGNO (src);
4607 machine_mode mode = GET_MODE (dest);
4608
4609 if (GPR_P (src_regno))
4610 {
4611 switch (mode)
4612 {
4613 default:
4614 break;
4615
4616 case E_QImode:
4617 return "cstb%I2%U2 %3, %M2, %1, %e0";
4618
4619 case E_HImode:
4620 return "csth%I2%U2 %3, %M2, %1, %e0";
4621
4622 case E_SImode:
4623 case E_SFmode:
4624 return "cst%I2%U2 %3, %M2, %1, %e0";
4625 }
4626 }
4627
4628 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4629 return "cstf%I2%U2 %3, %M2, %1, %e0";
4630 }
4631
4632 else if (ZERO_P (src))
4633 {
4634 machine_mode mode = GET_MODE (dest);
4635 switch (mode)
4636 {
4637 default:
4638 break;
4639
4640 case E_QImode:
4641 return "cstb%I2%U2 %., %M2, %1, %e0";
4642
4643 case E_HImode:
4644 return "csth%I2%U2 %., %M2, %1, %e0";
4645
4646 case E_SImode:
4647 case E_SFmode:
4648 return "cst%I2%U2 %., %M2, %1, %e0";
4649 }
4650 }
4651 }
4652
4653 fatal_insn ("bad output_condmove_single operand", insn);
4654 return "";
4655 }
4656
4657
4658 /* Emit the appropriate code to do a comparison, returning the register the
4659 comparison was done in. */
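/* Sketch of the emitted insn (derived from the code below, not from an
   actual dump): for an integer test such as (lt op0 op1) we emit

       (set cc_reg (compare:CCmode op0 op1))

   where CC_REG is a fresh pseudo when TARGET_ALLOC_CC is set and the
   fixed ICC (or FCC for CC_FPmode) register otherwise.  */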
4660
4661 static rtx
4662 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4663 {
4664 machine_mode cc_mode;
4665 rtx cc_reg;
4666
4667 /* Floating point doesn't have comparison against a constant. */
4668 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4669 op1 = force_reg (GET_MODE (op0), op1);
4670
4671 /* Possibly disable using anything but a fixed register in order to work
4672 around cse moving comparisons past function calls. */
4673 cc_mode = SELECT_CC_MODE (test, op0, op1);
4674 cc_reg = ((TARGET_ALLOC_CC)
4675 ? gen_reg_rtx (cc_mode)
4676 : gen_rtx_REG (cc_mode,
4677 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4678
4679 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (cc_mode, op0, op1)));
4680
4681 return cc_reg;
4682 }
4683
4684
4685 /* Emit code for a conditional branch.
4686 XXX: I originally wanted to add a clobber of a CCR register to use in
4687 conditional execution, but that confuses the rest of the compiler. */
4688
4689 int
4690 frv_emit_cond_branch (rtx operands[])
4691 {
4692 rtx test_rtx;
4693 rtx label_ref;
4694 rtx if_else;
4695 enum rtx_code test = GET_CODE (operands[0]);
4696 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
4697 machine_mode cc_mode = GET_MODE (cc_reg);
4698
4699 /* Branches generate:
4700 (set (pc)
4701 (if_then_else (<test>, <cc_reg>, (const_int 0))
4702 (label_ref <branch_label>)
4703 (pc))) */
4704 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
4705 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4706 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4707 emit_jump_insn (gen_rtx_SET (pc_rtx, if_else));
4708 return TRUE;
4709 }
4710
4711
4712 /* Emit code to set a gpr to 1/0 based on a comparison. */
4713
4714 int
4715 frv_emit_scc (rtx operands[])
4716 {
4717 rtx set;
4718 rtx test_rtx;
4719 rtx clobber;
4720 rtx cr_reg;
4721 enum rtx_code test = GET_CODE (operands[1]);
4722 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]);
4723
4724 /* SCC instructions generate:
4725 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
4726 (clobber (<ccr_reg>))]) */
4727 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4728 set = gen_rtx_SET (operands[0], test_rtx);
4729
4730 cr_reg = ((TARGET_ALLOC_CC)
4731 ? gen_reg_rtx (CC_CCRmode)
4732 : gen_rtx_REG (CC_CCRmode,
4733 ((GET_MODE (cc_reg) == CC_FPmode)
4734 ? FCR_FIRST
4735 : ICR_FIRST)));
4736
4737 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4738 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4739 return TRUE;
4740 }
4741
4742
4743 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4744 the separate insns. */
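/* Illustrative sketch (derived from the code below, not from real
   output): for DEST = (CC_REG < 0) with VALUE == 1 the returned
   sequence is roughly

       cr_reg = (lt cc_reg 0)        ; set the CCR bit
       dest   = 1                    ; assume the test succeeded
       if (cr_reg == 0) dest = 0     ; conditionally undo it

   so DEST ends up as 1 or 0 depending on the comparison.  */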
4745
4746 rtx
4747 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4748 {
4749 rtx ret;
4750
4751 start_sequence ();
4752
4753 /* Set the appropriate CCR bit. */
4754 emit_insn (gen_rtx_SET (cr_reg,
4755 gen_rtx_fmt_ee (GET_CODE (test),
4756 GET_MODE (cr_reg),
4757 cc_reg,
4758 const0_rtx)));
4759
4760 /* Move the value into the destination. */
4761 emit_move_insn (dest, GEN_INT (value));
4762
4763 /* Move 0 into the destination if the test failed. */
4764 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4765 gen_rtx_EQ (GET_MODE (cr_reg),
4766 cr_reg,
4767 const0_rtx),
4768 gen_rtx_SET (dest, const0_rtx)));
4769
4770 /* Finish up, return sequence. */
4771 ret = get_insns ();
4772 end_sequence ();
4773 return ret;
4774 }
4775
4776
4777 /* Emit the code for a conditional move, returning TRUE if we could do the
4778 move. */
4779
4780 int
4781 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4782 {
4783 rtx set;
4784 rtx clobber_cc;
4785 rtx test2;
4786 rtx cr_reg;
4787 rtx if_rtx;
4788 enum rtx_code test = GET_CODE (test_rtx);
4789 rtx cc_reg = frv_emit_comparison (test,
4790 XEXP (test_rtx, 0), XEXP (test_rtx, 1));
4791 machine_mode cc_mode = GET_MODE (cc_reg);
4792
4793 /* Conditional move instructions generate:
4794 (parallel [(set <target>
4795 (if_then_else (<test> <cc_reg> (const_int 0))
4796 <src1>
4797 <src2>))
4798 (clobber (<ccr_reg>))]) */
4799
4800 /* Handle various cases of conditional move involving two constants. */
4801 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4802 {
4803 HOST_WIDE_INT value1 = INTVAL (src1);
4804 HOST_WIDE_INT value2 = INTVAL (src2);
4805
4806 /* Having 0 as one of the constants can be done by loading the other
4807 constant, and optionally moving in gr0. */
4808 if (value1 == 0 || value2 == 0)
4809 ;
4810
4811 /* If the first value is within an addi range and also the difference
4812 between the two fits in an addi's range, load up the difference, then
4813 conditionally move in 0, and then unconditionally add the first
4814 value. */
4815 else if (IN_RANGE (value1, -2048, 2047)
4816 && IN_RANGE (value2 - value1, -2048, 2047))
4817 ;
4818
4819 /* If neither condition holds, just force the constant into a
4820 register. */
4821 else
4822 {
4823 src1 = force_reg (GET_MODE (dest), src1);
4824 src2 = force_reg (GET_MODE (dest), src2);
4825 }
4826 }
4827
4828 /* If one value is a register, ensure the other value is either 0 or a
4829 register. */
4830 else
4831 {
4832 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4833 src1 = force_reg (GET_MODE (dest), src1);
4834
4835 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4836 src2 = force_reg (GET_MODE (dest), src2);
4837 }
4838
4839 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4840 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4841
4842 set = gen_rtx_SET (dest, if_rtx);
4843
4844 cr_reg = ((TARGET_ALLOC_CC)
4845 ? gen_reg_rtx (CC_CCRmode)
4846 : gen_rtx_REG (CC_CCRmode,
4847 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4848
4849 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4850 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4851 return TRUE;
4852 }
4853
4854
4855 /* Split a conditional move into constituent parts, returning a SEQUENCE
4856 containing all of the insns. */
4857
4858 rtx
4859 frv_split_cond_move (rtx operands[])
4860 {
4861 rtx dest = operands[0];
4862 rtx test = operands[1];
4863 rtx cc_reg = operands[2];
4864 rtx src1 = operands[3];
4865 rtx src2 = operands[4];
4866 rtx cr_reg = operands[5];
4867 rtx ret;
4868 machine_mode cr_mode = GET_MODE (cr_reg);
4869
4870 start_sequence ();
4871
4872 /* Set the appropriate CCR bit. */
4873 emit_insn (gen_rtx_SET (cr_reg,
4874 gen_rtx_fmt_ee (GET_CODE (test),
4875 GET_MODE (cr_reg),
4876 cc_reg,
4877 const0_rtx)));
4878
4879 /* Handle various cases of conditional move involving two constants. */
4880 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4881 {
4882 HOST_WIDE_INT value1 = INTVAL (src1);
4883 HOST_WIDE_INT value2 = INTVAL (src2);
4884
4885 /* Having 0 as one of the constants can be done by loading the other
4886 constant, and optionally moving in gr0. */
4887 if (value1 == 0)
4888 {
4889 emit_move_insn (dest, src2);
4890 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4891 gen_rtx_NE (cr_mode, cr_reg,
4892 const0_rtx),
4893 gen_rtx_SET (dest, src1)));
4894 }
4895
4896 else if (value2 == 0)
4897 {
4898 emit_move_insn (dest, src1);
4899 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4900 gen_rtx_EQ (cr_mode, cr_reg,
4901 const0_rtx),
4902 gen_rtx_SET (dest, src2)));
4903 }
4904
4905 /* If the first value is within an addi range and also the difference
4906 between the two fits in an addi's range, load up the difference, then
4907 conditionally move in 0, and then unconditionally add the first
4908 value. */
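/* Hypothetical worked example (not taken from real output): for
   dest = (cond ? 5 : 12) we have value1 == 5 and value2 == 12, so the
   three insns emitted below compute

       dest = 12 - 5 = 7
       if (cond) dest = 0
       dest += 5

   which yields 5 when the condition holds and 12 otherwise, without
   needing an extra temporary register.  */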
4909 else if (IN_RANGE (value1, -2048, 2047)
4910 && IN_RANGE (value2 - value1, -2048, 2047))
4911 {
4912 rtx dest_si = ((GET_MODE (dest) == SImode)
4913 ? dest
4914 : gen_rtx_SUBREG (SImode, dest, 0));
4915
4916 emit_move_insn (dest_si, GEN_INT (value2 - value1));
4917 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4918 gen_rtx_NE (cr_mode, cr_reg,
4919 const0_rtx),
4920 gen_rtx_SET (dest_si, const0_rtx)));
4921 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
4922 }
4923
4924 else
4925 gcc_unreachable ();
4926 }
4927 else
4928 {
4929 /* Emit the conditional move for the test being true if needed. */
4930 if (! rtx_equal_p (dest, src1))
4931 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4932 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
4933 gen_rtx_SET (dest, src1)));
4934
4935 /* Emit the conditional move for the test being false if needed. */
4936 if (! rtx_equal_p (dest, src2))
4937 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4938 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
4939 gen_rtx_SET (dest, src2)));
4940 }
4941
4942 /* Finish up, return sequence. */
4943 ret = get_insns ();
4944 end_sequence ();
4945 return ret;
4946 }
4947
4948
4949 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
4950 memory location that is not known to be dword-aligned. */
4951 void
4952 frv_split_double_load (rtx dest, rtx source)
4953 {
4954 int regno = REGNO (dest);
4955 rtx dest1 = gen_highpart (SImode, dest);
4956 rtx dest2 = gen_lowpart (SImode, dest);
4957 rtx address = XEXP (source, 0);
4958
4959 /* If the address is pre-modified, load the lower-numbered register
4960 first, then load the other register using an integer offset from
4961 the modified base register. This order should always be safe,
4962 since the pre-modification cannot affect the same registers as the
4963 load does.
4964
4965 The situation for other loads is more complicated. Loading one
4966 of the registers could affect the value of ADDRESS, so we must
4967 be careful which order we do them in. */
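/* Illustrative example (hypothetical register numbers, assuming the
   lower-numbered register of the pair holds the first word): if DEST is
   the gr8/gr9 pair and SOURCE is @(gr8, 0), the address depends on gr8,
   so the code below loads gr9 from offset 4 first and only then
   overwrites gr8 with the first word.  */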
4968 if (GET_CODE (address) == PRE_MODIFY
4969 || ! refers_to_regno_p (regno, address))
4970 {
4971 /* It is safe to load the lower-numbered register first. */
4972 emit_move_insn (dest1, change_address (source, SImode, NULL));
4973 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4974 }
4975 else
4976 {
4977 /* ADDRESS is not pre-modified and the address depends on the
4978 lower-numbered register. Load the higher-numbered register
4979 first. */
4980 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
4981 emit_move_insn (dest1, change_address (source, SImode, NULL));
4982 }
4983 }
4984
4985 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
4986 and SOURCE is either a double register or the constant zero. */
4987 void
4988 frv_split_double_store (rtx dest, rtx source)
4989 {
4990 rtx dest1 = change_address (dest, SImode, NULL);
4991 rtx dest2 = frv_index_memory (dest, SImode, 1);
4992 if (ZERO_P (source))
4993 {
4994 emit_move_insn (dest1, CONST0_RTX (SImode));
4995 emit_move_insn (dest2, CONST0_RTX (SImode));
4996 }
4997 else
4998 {
4999 emit_move_insn (dest1, gen_highpart (SImode, source));
5000 emit_move_insn (dest2, gen_lowpart (SImode, source));
5001 }
5002 }
5003
5004
5005 /* Split a min/max operation returning a SEQUENCE containing all of the
5006 insns. */
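/* Sketch of the emitted sequence (derived from the code below rather
   than from actual dumps): for dest = smin (a, b) with two register
   operands we get

       cc_reg = compare (a, b)
       cr_reg = (lt cc_reg 0)
       if (cr_reg != 0) dest = a     ; a < b, take a
       if (cr_reg == 0) dest = b     ; otherwise take b

   while the nonzero-constant case loads the constant first and then
   conditionally overwrites it.  */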
5007
5008 rtx
5009 frv_split_minmax (rtx operands[])
5010 {
5011 rtx dest = operands[0];
5012 rtx minmax = operands[1];
5013 rtx src1 = operands[2];
5014 rtx src2 = operands[3];
5015 rtx cc_reg = operands[4];
5016 rtx cr_reg = operands[5];
5017 rtx ret;
5018 enum rtx_code test_code;
5019 machine_mode cr_mode = GET_MODE (cr_reg);
5020
5021 start_sequence ();
5022
5023 /* Figure out which test to use. */
5024 switch (GET_CODE (minmax))
5025 {
5026 default:
5027 gcc_unreachable ();
5028
5029 case SMIN: test_code = LT; break;
5030 case SMAX: test_code = GT; break;
5031 case UMIN: test_code = LTU; break;
5032 case UMAX: test_code = GTU; break;
5033 }
5034
5035 /* Issue the compare instruction. */
5036 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (GET_MODE (cc_reg),
5037 src1, src2)));
5038
5039 /* Set the appropriate CCR bit. */
5040 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (test_code,
5041 GET_MODE (cr_reg),
5042 cc_reg,
5043 const0_rtx)));
5044
5045 /* If we are taking the min/max of a nonzero constant, load that first, and
5046 then do a conditional move of the other value. */
5047 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5048 {
5049 gcc_assert (!rtx_equal_p (dest, src1));
5050
5051 emit_move_insn (dest, src2);
5052 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5053 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5054 gen_rtx_SET (dest, src1)));
5055 }
5056
5057 /* Otherwise, do each half of the move. */
5058 else
5059 {
5060 /* Emit the conditional move for the test being true if needed. */
5061 if (! rtx_equal_p (dest, src1))
5062 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5063 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5064 gen_rtx_SET (dest, src1)));
5065
5066 /* Emit the conditional move for the test being false if needed. */
5067 if (! rtx_equal_p (dest, src2))
5068 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5069 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5070 gen_rtx_SET (dest, src2)));
5071 }
5072
5073 /* Finish up, return sequence. */
5074 ret = get_insns ();
5075 end_sequence ();
5076 return ret;
5077 }
5078
5079
5080 /* Split an integer abs operation returning a SEQUENCE containing all of the
5081 insns. */
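/* Illustrative sketch (derived from the code below): for
   dest = abs (src) the sequence is

       cc_reg = compare (src, 0)
       cr_reg = (lt cc_reg 0)
       if (cr_reg != 0) dest = -src
       if (cr_reg == 0) dest = src   ; only emitted when dest != src

   i.e. a conditional negate guarded by the sign of SRC.  */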
5082
5083 rtx
5084 frv_split_abs (rtx operands[])
5085 {
5086 rtx dest = operands[0];
5087 rtx src = operands[1];
5088 rtx cc_reg = operands[2];
5089 rtx cr_reg = operands[3];
5090 rtx ret;
5091
5092 start_sequence ();
5093
5094 /* Issue the compare < 0 instruction. */
5095 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5096
5097 /* Set the appropriate CCR bit. */
5098 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (LT, CC_CCRmode,
5099 cc_reg, const0_rtx)));
5100
5101 /* Emit the conditional negate if the value is negative. */
5102 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5103 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5104 gen_negsi2 (dest, src)));
5105
5106 /* Emit the conditional move for the test being false if needed. */
5107 if (! rtx_equal_p (dest, src))
5108 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5109 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5110 gen_rtx_SET (dest, src)));
5111
5112 /* Finish up, return sequence. */
5113 ret = get_insns ();
5114 end_sequence ();
5115 return ret;
5116 }
5117
5118
5119 /* Initialize machine-specific if-conversion data.
5120 On the FR-V, we don't have any extra fields per se, but it is a useful hook to
5121 initialize the static storage. */
5122 void
5123 frv_ifcvt_machdep_init (void *ce_info ATTRIBUTE_UNUSED)
5124 {
5125 frv_ifcvt.added_insns_list = NULL_RTX;
5126 frv_ifcvt.cur_scratch_regs = 0;
5127 frv_ifcvt.num_nested_cond_exec = 0;
5128 frv_ifcvt.cr_reg = NULL_RTX;
5129 frv_ifcvt.nested_cc_reg = NULL_RTX;
5130 frv_ifcvt.extra_int_cr = NULL_RTX;
5131 frv_ifcvt.extra_fp_cr = NULL_RTX;
5132 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5133 }
5134
5135
5136 /* Internal function to add a potential insn to the list of insns to be inserted
5137 if the conditional execution conversion is successful. */
5138
5139 static void
5140 frv_ifcvt_add_insn (rtx pattern, rtx_insn *insn, int before_p)
5141 {
5142 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5143
5144 link->jump = before_p; /* Mark to add this before or after insn. */
5145 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5146 frv_ifcvt.added_insns_list);
5147
5148 if (TARGET_DEBUG_COND_EXEC)
5149 {
5150 fprintf (stderr,
5151 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5152 (before_p) ? "before" : "after",
5153 (int)INSN_UID (insn));
5154
5155 debug_rtx (pattern);
5156 }
5157 }
5158
5159
5160 /* A C expression to modify the code described by the conditional if
5161 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5162 FALSE_EXPR for converting if-then and if-then-else code to conditional
5163 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5164 tests cannot be converted. */
5165
5166 void
5167 frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false)
5168 {
5169 basic_block test_bb = ce_info->test_bb; /* test basic block */
5170 basic_block then_bb = ce_info->then_bb; /* THEN */
5171 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5172 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5173 rtx true_expr = *p_true;
5174 rtx cr;
5175 rtx cc;
5176 rtx nested_cc;
5177 machine_mode mode = GET_MODE (true_expr);
5178 int j;
5179 basic_block *bb;
5180 int num_bb;
5181 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5182 rtx check_insn;
5183 rtx sub_cond_exec_reg;
5184 enum rtx_code code;
5185 enum rtx_code code_true;
5186 enum rtx_code code_false;
5187 enum reg_class cc_class;
5188 enum reg_class cr_class;
5189 int cc_first;
5190 int cc_last;
5191 reg_set_iterator rsi;
5192
5193 /* Make sure we are only dealing with hard registers. Also honor the
5194 -mno-cond-exec and -mno-nested-cond-exec switches if
5195 applicable. */
5196 if (!reload_completed || !TARGET_COND_EXEC
5197 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5198 goto fail;
5199
5200 /* Figure out which registers we can allocate for our own purposes. Only
5201 consider registers that are not preserved across function calls and are
5202 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5203 if we did not need to use them in reloading other registers. */
5204 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5205 tmp_reg->regs = regs_invalidated_by_call & ~fixed_reg_set;
5206 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5207 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5208
5209 /* If this is a nested IF, we need to discover whether the CC registers that
5210 are set/used inside of the block are used anywhere else. If not, we can
5211 change them to be the CC register that is paired with the CR register that
5212 controls the outermost IF block. */
5213 if (ce_info->pass > 1)
5214 {
5215 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5216 for (j = CC_FIRST; j <= CC_LAST; j++)
5217 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5218 {
5219 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5220 continue;
5221
5222 if (else_bb
5223 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5224 continue;
5225
5226 if (join_bb
5227 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5228 continue;
5229
5230 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5231 }
5232 }
5233
5234 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5235 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5236
5237 frv_ifcvt.added_insns_list = NULL_RTX;
5238 frv_ifcvt.cur_scratch_regs = 0;
5239
5240 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5241 * sizeof (basic_block));
5242
5243 if (join_bb)
5244 {
5245 unsigned int regno;
5246
5247 /* Remove anything live at the beginning of the join block from being
5248 available for allocation. */
5249 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5250 {
5251 if (regno < FIRST_PSEUDO_REGISTER)
5252 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5253 }
5254 }
5255
5256 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5257 num_bb = 0;
5258 if (ce_info->num_multiple_test_blocks)
5259 {
5260 basic_block multiple_test_bb = ce_info->last_test_bb;
5261
5262 while (multiple_test_bb != test_bb)
5263 {
5264 bb[num_bb++] = multiple_test_bb;
5265 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5266 }
5267 }
5268
5269 /* Add in the THEN and ELSE blocks to be scanned. */
5270 bb[num_bb++] = then_bb;
5271 if (else_bb)
5272 bb[num_bb++] = else_bb;
5273
5274 sub_cond_exec_reg = NULL_RTX;
5275 frv_ifcvt.num_nested_cond_exec = 0;
5276
5277 /* Scan all of the blocks for registers that must not be allocated. */
5278 for (j = 0; j < num_bb; j++)
5279 {
5280 rtx_insn *last_insn = BB_END (bb[j]);
5281 rtx_insn *insn = BB_HEAD (bb[j]);
5282 unsigned int regno;
5283
5284 if (dump_file)
5285 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5286 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5287 (int) bb[j]->index,
5288 (int) INSN_UID (BB_HEAD (bb[j])),
5289 (int) INSN_UID (BB_END (bb[j])));
5290
5291 /* Anything live at the beginning of the block is obviously unavailable
5292 for allocation. */
5293 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5294 {
5295 if (regno < FIRST_PSEUDO_REGISTER)
5296 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5297 }
5298
5299 /* Loop through the insns in the block. */
5300 for (;;)
5301 {
5302 /* Mark any new registers that are created as being unavailable for
5303 allocation. Also see if the CC register used in nested IFs can be
5304 reallocated. */
5305 if (INSN_P (insn))
5306 {
5307 rtx pattern;
5308 rtx set;
5309 int skip_nested_if = FALSE;
5310 HARD_REG_SET mentioned_regs;
5311
5312 CLEAR_HARD_REG_SET (mentioned_regs);
5313 find_all_hard_regs (PATTERN (insn), &mentioned_regs);
5314 tmp_reg->regs &= ~mentioned_regs;
5315
5316 pattern = PATTERN (insn);
5317 if (GET_CODE (pattern) == COND_EXEC)
5318 {
5319 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5320
5321 if (reg != sub_cond_exec_reg)
5322 {
5323 sub_cond_exec_reg = reg;
5324 frv_ifcvt.num_nested_cond_exec++;
5325 }
5326 }
5327
5328 set = single_set_pattern (pattern);
5329 if (set)
5330 {
5331 rtx dest = SET_DEST (set);
5332 rtx src = SET_SRC (set);
5333
5334 if (GET_CODE (dest) == REG)
5335 {
5336 int regno = REGNO (dest);
5337 enum rtx_code src_code = GET_CODE (src);
5338
5339 if (CC_P (regno) && src_code == COMPARE)
5340 skip_nested_if = TRUE;
5341
5342 else if (CR_P (regno)
5343 && (src_code == IF_THEN_ELSE
5344 || COMPARISON_P (src)))
5345 skip_nested_if = TRUE;
5346 }
5347 }
5348
5349 if (! skip_nested_if)
5350 frv_ifcvt.nested_cc_ok_rewrite &= ~mentioned_regs;
5351 }
5352
5353 if (insn == last_insn)
5354 break;
5355
5356 insn = NEXT_INSN (insn);
5357 }
5358 }
5359
5360 /* If this is a nested if, rewrite the CC registers that are available to
5361 include the ones that can be rewritten, to increase the chance of being
5362 able to allocate a paired CC/CR register combination. */
5363 if (ce_info->pass > 1)
5364 {
5365 for (j = CC_FIRST; j <= CC_LAST; j++)
5366 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5367 SET_HARD_REG_BIT (tmp_reg->regs, j);
5368 else
5369 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5370 }
5371
5372 if (dump_file)
5373 {
5374 int num_gprs = 0;
5375 fprintf (dump_file, "Available GPRs: ");
5376
5377 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5378 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5379 {
5380 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5381 if (++num_gprs > GPR_TEMP_NUM+2)
5382 break;
5383 }
5384
5385 fprintf (dump_file, "%s\nAvailable CRs: ",
5386 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5387
5388 for (j = CR_FIRST; j <= CR_LAST; j++)
5389 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5390 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5391
5392 fputs ("\n", dump_file);
5393
5394 if (ce_info->pass > 1)
5395 {
5396 fprintf (dump_file, "Modifiable CCs: ");
5397 for (j = CC_FIRST; j <= CC_LAST; j++)
5398 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5399 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5400
5401 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5402 frv_ifcvt.num_nested_cond_exec);
5403 }
5404 }
5405
5406 /* Allocate the appropriate temporary condition code register. Try to
5407 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5408 that conditional cmp's can be done. */
5409 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5410 {
5411 cr_class = ICR_REGS;
5412 cc_class = ICC_REGS;
5413 cc_first = ICC_FIRST;
5414 cc_last = ICC_LAST;
5415 }
5416 else if (mode == CC_FPmode)
5417 {
5418 cr_class = FCR_REGS;
5419 cc_class = FCC_REGS;
5420 cc_first = FCC_FIRST;
5421 cc_last = FCC_LAST;
5422 }
5423 else
5424 {
5425 cc_first = cc_last = 0;
5426 cr_class = cc_class = NO_REGS;
5427 }
5428
5429 cc = XEXP (true_expr, 0);
5430 nested_cc = cr = NULL_RTX;
5431 if (cc_class != NO_REGS)
5432 {
5433 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5434 so we can execute a csubcc/caddcc/cfcmps instruction. */
5435 int cc_regno;
5436
5437 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5438 {
5439 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5440
5441 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5442 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5443 {
5444 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5445 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5446 TRUE);
5447
5448 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5449 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5450 TRUE, TRUE);
5451 break;
5452 }
5453 }
5454 }
5455
5456 if (! cr)
5457 {
5458 if (dump_file)
5459 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5460
5461 goto fail;
5462 }
5463
5464 if (dump_file)
5465 fprintf (dump_file,
5466 "Will use %s for conditional execution, %s for nested comparisons\n",
5467 reg_names[ REGNO (cr)],
5468 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5469
5470 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5471 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5472 bit being true. We don't do this for floating point, because of NaNs. */
5473 code = GET_CODE (true_expr);
5474 if (GET_MODE (cc) != CC_FPmode)
5475 {
5476 code = reverse_condition (code);
5477 code_true = EQ;
5478 code_false = NE;
5479 }
5480 else
5481 {
5482 code_true = NE;
5483 code_false = EQ;
5484 }
5485
5486 check_insn = gen_rtx_SET (cr, gen_rtx_fmt_ee (code, CC_CCRmode,
5487 cc, const0_rtx));
5488
5489 /* Record the check insn to be inserted later. */
5490 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5491
5492 /* Update the tests. */
5493 frv_ifcvt.cr_reg = cr;
5494 frv_ifcvt.nested_cc_reg = nested_cc;
5495 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5496 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5497 return;
5498
5499 /* Fail, don't do this conditional execution. */
5500 fail:
5501 *p_true = NULL_RTX;
5502 *p_false = NULL_RTX;
5503 if (dump_file)
5504 fprintf (dump_file, "Disabling this conditional execution.\n");
5505
5506 return;
5507 }
5508
5509
5510 /* A C expression to modify the code described by the conditional if
5511 information CE_INFO, for the basic block BB, possibly updating the tests in
5512 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5513 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5514 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5515
5516 /* p_true and p_false are given expressions of the form:
5517
5518 (and (eq:CC_CCR (reg:CC_CCR)
5519 (const_int 0))
5520 (eq:CC (reg:CC)
5521 (const_int 0))) */
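
/* As a rough sketch (register choices illustrative, not literal compiler
   output): converting the second test of an "if (a && b)" chain queues two
   insns at the end of BB -- a conditional check that computes the test into
   a freshly allocated CR,

       (set new_cr (if_then_else old_test (ne cc 0) (const_int 0)))

   followed by the gen_andcr (cr, cr, new_cr) that folds it into the
   accumulated CR -- while *p_true and *p_false are rewritten to plain
   (ne cr 0) / (eq cr 0) tests of that CR.  */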
5522
5523 void
5524 frv_ifcvt_modify_multiple_tests (ce_if_block *ce_info,
5525 basic_block bb,
5526 rtx *p_true,
5527 rtx *p_false)
5528 {
5529 rtx old_true = XEXP (*p_true, 0);
5530 rtx old_false = XEXP (*p_false, 0);
5531 rtx true_expr = XEXP (*p_true, 1);
5532 rtx false_expr = XEXP (*p_false, 1);
5533 rtx test_expr;
5534 rtx old_test;
5535 rtx cr = XEXP (old_true, 0);
5536 rtx check_insn;
5537 rtx new_cr = NULL_RTX;
5538 rtx *p_new_cr = (rtx *)0;
5539 rtx if_else;
5540 rtx compare;
5541 rtx cc;
5542 enum reg_class cr_class;
5543 machine_mode mode = GET_MODE (true_expr);
5544 rtx (*logical_func)(rtx, rtx, rtx);
5545
5546 if (TARGET_DEBUG_COND_EXEC)
5547 {
5548 fprintf (stderr,
5549 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5550 ce_info->and_and_p ? "&&" : "||");
5551
5552 debug_rtx (*p_true);
5553
5554 fputs ("\nfalse insn:\n", stderr);
5555 debug_rtx (*p_false);
5556 }
5557
5558 if (!TARGET_MULTI_CE)
5559 goto fail;
5560
5561 if (GET_CODE (cr) != REG)
5562 goto fail;
5563
5564 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5565 {
5566 cr_class = ICR_REGS;
5567 p_new_cr = &frv_ifcvt.extra_int_cr;
5568 }
5569 else if (mode == CC_FPmode)
5570 {
5571 cr_class = FCR_REGS;
5572 p_new_cr = &frv_ifcvt.extra_fp_cr;
5573 }
5574 else
5575 goto fail;
5576
5577 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5578 more &&/|| tests. */
5579 new_cr = *p_new_cr;
5580 if (! new_cr)
5581 {
5582 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5583 CC_CCRmode, TRUE, TRUE);
5584 if (! new_cr)
5585 goto fail;
5586 }
5587
5588 if (ce_info->and_and_p)
5589 {
5590 old_test = old_false;
5591 test_expr = true_expr;
5592 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5593 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5594 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5595 }
5596 else
5597 {
5598 old_test = old_false;
5599 test_expr = false_expr;
5600 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5601 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5602 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5603 }
5604
5605 /* First add the andcr/andncr/orcr/orncr, which will be added after the
5606 conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
5607 stack. */
5608 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5609
5610 /* Now add the conditional check insn. */
5611 cc = XEXP (test_expr, 0);
5612 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5613 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5614
5615 check_insn = gen_rtx_SET (new_cr, if_else);
5616
5617 /* Add the new check insn to the list of check insns that need to be
5618 inserted. */
5619 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5620
5621 if (TARGET_DEBUG_COND_EXEC)
5622 {
5623 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5624 stderr);
5625
5626 debug_rtx (*p_true);
5627
5628 fputs ("\nfalse insn:\n", stderr);
5629 debug_rtx (*p_false);
5630 }
5631
5632 return;
5633
5634 fail:
5635 *p_true = *p_false = NULL_RTX;
5636
5637 /* If we allocated a CR register, release it. */
5638 if (new_cr)
5639 {
5640 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5641 *p_new_cr = NULL_RTX;
5642 }
5643
5644 if (TARGET_DEBUG_COND_EXEC)
5645 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5646
5647 return;
5648 }
5649
5650
5651 /* Return a register which will be loaded with a value if an IF block is
5652 converted to conditional execution. This is used to rewrite instructions
5653 that use constants to ones that just use registers. */
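
/* For illustration (a sketch, not literal output): when if-converting
   "x = y + 32", the immediate cannot appear in the conditional add, so the
   caller asks here for a register holding 32.  The SET loading that
   constant is remembered in frv_ifcvt.scratch_regs[] and emitted
   unconditionally ahead of the converted block by frv_ifcvt_modify_final;
   a later request for the same value returns the same scratch register,
   and at most GPR_TEMP_NUM scratches are handed out.  */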
5654
5655 static rtx
5656 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5657 {
5658 int num_alloc = frv_ifcvt.cur_scratch_regs;
5659 int i;
5660 rtx reg;
5661
5662 /* We know gr0 == 0, so replace any errant uses. */
5663 if (value == const0_rtx)
5664 return gen_rtx_REG (SImode, GPR_FIRST);
5665
5666 /* First search all registers currently loaded to see if we have an
5667 applicable constant. */
5668 if (CONSTANT_P (value)
5669 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5670 {
5671 for (i = 0; i < num_alloc; i++)
5672 {
5673 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5674 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5675 }
5676 }
5677
5678 /* Have we exhausted the number of registers available? */
5679 if (num_alloc >= GPR_TEMP_NUM)
5680 {
5681 if (dump_file)
5682 fprintf (dump_file, "Too many temporary registers allocated\n");
5683
5684 return NULL_RTX;
5685 }
5686
5687 /* Allocate the new register. */
5688 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5689 if (! reg)
5690 {
5691 if (dump_file)
5692 fputs ("Could not find a scratch register\n", dump_file);
5693
5694 return NULL_RTX;
5695 }
5696
5697 frv_ifcvt.cur_scratch_regs++;
5698 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (reg, value);
5699
5700 if (dump_file)
5701 {
5702 if (GET_CODE (value) == CONST_INT)
5703 fprintf (dump_file, "Register %s will hold %ld\n",
5704 reg_names[ REGNO (reg)], (long)INTVAL (value));
5705
5706 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5707 fprintf (dump_file, "Register %s will hold LR\n",
5708 reg_names[ REGNO (reg)]);
5709
5710 else
5711 fprintf (dump_file, "Register %s will hold a saved value\n",
5712 reg_names[ REGNO (reg)]);
5713 }
5714
5715 return reg;
5716 }
5717
5718
5719 /* Update a MEM used in conditional code that might contain an offset to put
5720 the offset into a scratch register, so that the conditional load/store
5721 operations can be used. This function returns the original pointer if the
5722 MEM is valid to use in conditional code, NULL if we can't load up the offset
5723 into a temporary register, or the new MEM if we were successful. */
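
/* Sketch of the rewrite (addresses and registers illustrative): a
   conditional access to

       (mem:SI (plus:SI (reg:SI gr16) (const_int 400)))

   is not a legitimate conditional-execution address, so the 400 is given a
   scratch GPR via frv_ifcvt_load_value and the MEM is rebuilt as

       (mem:SI (plus:SI (reg:SI gr16) (reg:SI <scratch>)))

   leaving an address form that the conditional load/store operations can
   use.  */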
5724
5725 static rtx
5726 frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn)
5727 {
5728 rtx addr = XEXP (mem, 0);
5729
5730 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE))
5731 {
5732 if (GET_CODE (addr) == PLUS)
5733 {
5734 rtx addr_op0 = XEXP (addr, 0);
5735 rtx addr_op1 = XEXP (addr, 1);
5736
5737 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5738 {
5739 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5740 if (!reg)
5741 return NULL_RTX;
5742
5743 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5744 }
5745
5746 else
5747 return NULL_RTX;
5748 }
5749
5750 else if (CONSTANT_P (addr))
5751 addr = frv_ifcvt_load_value (addr, insn);
5752
5753 else
5754 return NULL_RTX;
5755
5756 if (addr == NULL_RTX)
5757 return NULL_RTX;
5758
5759 else if (XEXP (mem, 0) != addr)
5760 return change_address (mem, mode, addr);
5761 }
5762
5763 return mem;
5764 }
5765
5766
5767 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5768 SET, possibly conditionally executed. It may also have CLOBBERs, USEs. */
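
/* For example (register names illustrative), a PARALLEL such as

       (parallel [(set (reg:SI gr10) (reg:SI gr11))
                  (clobber (reg:CC icc0))])

   yields the inner SET, whereas a PARALLEL containing two SETs, or any
   element other than SET/USE/CLOBBER, makes this function return 0.  */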
5769
5770 static rtx
5771 single_set_pattern (rtx pattern)
5772 {
5773 rtx set;
5774 int i;
5775
5776 if (GET_CODE (pattern) == COND_EXEC)
5777 pattern = COND_EXEC_CODE (pattern);
5778
5779 if (GET_CODE (pattern) == SET)
5780 return pattern;
5781
5782 else if (GET_CODE (pattern) == PARALLEL)
5783 {
5784 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5785 {
5786 rtx sub = XVECEXP (pattern, 0, i);
5787
5788 switch (GET_CODE (sub))
5789 {
5790 case USE:
5791 case CLOBBER:
5792 break;
5793
5794 case SET:
5795 if (set)
5796 return 0;
5797 else
5798 set = sub;
5799 break;
5800
5801 default:
5802 return 0;
5803 }
5804 }
5805 return set;
5806 }
5807
5808 return 0;
5809 }
5810
5811
5812 /* A C expression to modify the code described by the conditional if
5813 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5814 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that the
5815 insn cannot be converted to be executed conditionally. */
5816
5817 rtx
5818 frv_ifcvt_modify_insn (ce_if_block *ce_info,
5819 rtx pattern,
5820 rtx_insn *insn)
5821 {
5822 rtx orig_ce_pattern = pattern;
5823 rtx set;
5824 rtx op0;
5825 rtx op1;
5826 rtx test;
5827
5828 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5829
5830 test = COND_EXEC_TEST (pattern);
5831 if (GET_CODE (test) == AND)
5832 {
5833 rtx cr = frv_ifcvt.cr_reg;
5834 rtx test_reg;
5835
5836 op0 = XEXP (test, 0);
5837 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5838 goto fail;
5839
5840 op1 = XEXP (test, 1);
5841 test_reg = XEXP (op1, 0);
5842 if (GET_CODE (test_reg) != REG)
5843 goto fail;
5844
5845 /* Is this the first nested if block in this sequence? If so, generate
5846 an andcr or andncr. */
5847 if (! frv_ifcvt.last_nested_if_cr)
5848 {
5849 rtx and_op;
5850
5851 frv_ifcvt.last_nested_if_cr = test_reg;
5852 if (GET_CODE (op0) == NE)
5853 and_op = gen_andcr (test_reg, cr, test_reg);
5854 else
5855 and_op = gen_andncr (test_reg, cr, test_reg);
5856
5857 frv_ifcvt_add_insn (and_op, insn, TRUE);
5858 }
5859
5860 /* If this isn't the first statement in the nested if sequence, see if we
5861 are dealing with the same register. */
5862 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5863 goto fail;
5864
5865 COND_EXEC_TEST (pattern) = test = op1;
5866 }
5867
5868 /* If this isn't a nested if, reset state variables. */
5869 else
5870 {
5871 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5872 }
5873
5874 set = single_set_pattern (pattern);
5875 if (set)
5876 {
5877 rtx dest = SET_DEST (set);
5878 rtx src = SET_SRC (set);
5879 machine_mode mode = GET_MODE (dest);
5880
5881 /* Check for normal binary operators. */
5882 if (mode == SImode && ARITHMETIC_P (src))
5883 {
5884 op0 = XEXP (src, 0);
5885 op1 = XEXP (src, 1);
5886
5887 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
5888 {
5889 op1 = frv_ifcvt_load_value (op1, insn);
5890 if (op1)
5891 COND_EXEC_CODE (pattern)
5892 = gen_rtx_SET (dest, gen_rtx_fmt_ee (GET_CODE (src),
5893 GET_MODE (src),
5894 op0, op1));
5895 else
5896 goto fail;
5897 }
5898 }
5899
5900 /* For multiply by a constant, we need to handle the sign extending
5901 correctly. Add a USE of the value after the multiply to prevent flow
5902 from cratering because only one register out of the two was used. */
5903 else if (mode == DImode && GET_CODE (src) == MULT)
5904 {
5905 op0 = XEXP (src, 0);
5906 op1 = XEXP (src, 1);
5907 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
5908 {
5909 op1 = frv_ifcvt_load_value (op1, insn);
5910 if (op1)
5911 {
5912 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
5913 COND_EXEC_CODE (pattern)
5914 = gen_rtx_SET (dest, gen_rtx_MULT (DImode, op0, op1));
5915 }
5916 else
5917 goto fail;
5918 }
5919
5920 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
5921 }
5922
5923 /* If we are just loading a constant created for a nested conditional
5924 execution statement, just load the constant without any conditional
5925 execution, since we know that the constant will not interfere with any
5926 other registers. */
5927 else if (frv_ifcvt.scratch_insns_bitmap
5928 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
5929 INSN_UID (insn))
5930 && REG_P (SET_DEST (set))
5931 /* We must not unconditionally set a scratch reg chosen
5932 for a nested if-converted block if its incoming
5933 value from the TEST block (or the result of the THEN
5934 branch) could/should propagate to the JOIN block.
5935 It suffices to test whether the register is live at
5936 the JOIN point: if it's live there, we can infer
5937 that we set it in the former JOIN block of the
5938 nested if-converted block (otherwise it wouldn't
5939 have been available as a scratch register), and it
5940 is either propagated through or set in the other
5941 conditional block. It's probably not worth trying
5942 to catch the latter case, and it could actually
5943 limit scheduling of the combined block quite
5944 severely. */
5945 && ce_info->join_bb
5946 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
5947 REGNO (SET_DEST (set))))
5948 /* Similarly, we must not unconditionally set a reg
5949 used as scratch in the THEN branch if the same reg
5950 is live in the ELSE branch. */
5951 && (! ce_info->else_bb
5952 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
5953 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
5954 REGNO (SET_DEST (set))))))
5955 pattern = set;
5956
5957 else if (mode == QImode || mode == HImode || mode == SImode
5958 || mode == SFmode)
5959 {
5960 int changed_p = FALSE;
5961
5962 /* Check for just loading up a constant */
5963 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
5964 {
5965 src = frv_ifcvt_load_value (src, insn);
5966 if (!src)
5967 goto fail;
5968
5969 changed_p = TRUE;
5970 }
5971
5972 /* See if we need to fix up stores */
5973 if (GET_CODE (dest) == MEM)
5974 {
5975 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
5976
5977 if (!new_mem)
5978 goto fail;
5979
5980 else if (new_mem != dest)
5981 {
5982 changed_p = TRUE;
5983 dest = new_mem;
5984 }
5985 }
5986
5987 /* See if we need to fix up loads */
5988 if (GET_CODE (src) == MEM)
5989 {
5990 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
5991
5992 if (!new_mem)
5993 goto fail;
5994
5995 else if (new_mem != src)
5996 {
5997 changed_p = TRUE;
5998 src = new_mem;
5999 }
6000 }
6001
6002 /* If either src or destination changed, redo SET. */
6003 if (changed_p)
6004 COND_EXEC_CODE (pattern) = gen_rtx_SET (dest, src);
6005 }
6006
6007 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
6008 rewriting the CC register to be the same as the paired CC/CR register
6009 for nested ifs. */
6010 else if (mode == CC_CCRmode && COMPARISON_P (src))
6011 {
6012 int regno = REGNO (XEXP (src, 0));
6013 rtx if_else;
6014
6015 if (ce_info->pass > 1
6016 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6017 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6018 {
6019 src = gen_rtx_fmt_ee (GET_CODE (src),
6020 CC_CCRmode,
6021 frv_ifcvt.nested_cc_reg,
6022 XEXP (src, 1));
6023 }
6024
6025 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6026 pattern = gen_rtx_SET (dest, if_else);
6027 }
6028
6029 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6030 else if (ce_info->pass > 1
6031 && GET_CODE (dest) == REG
6032 && CC_P (REGNO (dest))
6033 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6034 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6035 REGNO (dest))
6036 && GET_CODE (src) == COMPARE)
6037 {
6038 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6039 COND_EXEC_CODE (pattern)
6040 = gen_rtx_SET (frv_ifcvt.nested_cc_reg, copy_rtx (src));
6041 }
6042 }
6043
6044 if (TARGET_DEBUG_COND_EXEC)
6045 {
6046 rtx orig_pattern = PATTERN (insn);
6047
6048 PATTERN (insn) = pattern;
6049 fprintf (stderr,
6050 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6051 ce_info->pass);
6052
6053 debug_rtx (insn);
6054 PATTERN (insn) = orig_pattern;
6055 }
6056
6057 return pattern;
6058
6059 fail:
6060 if (TARGET_DEBUG_COND_EXEC)
6061 {
6062 rtx orig_pattern = PATTERN (insn);
6063
6064 PATTERN (insn) = orig_ce_pattern;
6065 fprintf (stderr,
6066 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6067 ce_info->pass);
6068
6069 debug_rtx (insn);
6070 PATTERN (insn) = orig_pattern;
6071 }
6072
6073 return NULL_RTX;
6074 }
6075
6076
6077 /* A C expression to perform any final machine dependent modifications in
6078 converting code to conditional execution in the code described by the
6079 conditional if information CE_INFO. */
6080
6081 void
6082 frv_ifcvt_modify_final (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6083 {
6084 rtx_insn *existing_insn;
6085 rtx check_insn;
6086 rtx p = frv_ifcvt.added_insns_list;
6087 int i;
6088
6089 /* Loop inserting the check insns. The last check insn is the first test,
6090 and is the appropriate place to insert constants. */
6091 gcc_assert (p);
6092
6093 do
6094 {
6095 rtx check_and_insert_insns = XEXP (p, 0);
6096 rtx old_p = p;
6097
6098 check_insn = XEXP (check_and_insert_insns, 0);
6099 existing_insn = as_a <rtx_insn *> (XEXP (check_and_insert_insns, 1));
6100 p = XEXP (p, 1);
6101
6102 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6103 the existing insn, otherwise it is to be inserted AFTER. */
6104 if (check_and_insert_insns->jump)
6105 {
6106 emit_insn_before (check_insn, existing_insn);
6107 check_and_insert_insns->jump = 0;
6108 }
6109 else
6110 emit_insn_after (check_insn, existing_insn);
6111
6112 free_EXPR_LIST_node (check_and_insert_insns);
6113 free_EXPR_LIST_node (old_p);
6114 }
6115 while (p != NULL_RTX);
6116
6117 /* Load up any constants needed into temp gprs */
6118 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6119 {
6120 rtx_insn *insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6121 if (! frv_ifcvt.scratch_insns_bitmap)
6122 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6123 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6124 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6125 }
6126
6127 frv_ifcvt.added_insns_list = NULL_RTX;
6128 frv_ifcvt.cur_scratch_regs = 0;
6129 }
6130
6131
6132 /* A C expression to cancel any machine dependent modifications in converting
6133 code to conditional execution in the code described by the conditional if
6134 information CE_INFO. */
6135
6136 void
6137 frv_ifcvt_modify_cancel (ce_if_block *ce_info ATTRIBUTE_UNUSED)
6138 {
6139 int i;
6140 rtx p = frv_ifcvt.added_insns_list;
6141
6142 /* Loop freeing up the EXPR_LIST's allocated. */
6143 while (p != NULL_RTX)
6144 {
6145 rtx check_and_jump = XEXP (p, 0);
6146 rtx old_p = p;
6147
6148 p = XEXP (p, 1);
6149 free_EXPR_LIST_node (check_and_jump);
6150 free_EXPR_LIST_node (old_p);
6151 }
6152
6153 /* Release any temporary gprs allocated. */
6154 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6155 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6156
6157 frv_ifcvt.added_insns_list = NULL_RTX;
6158 frv_ifcvt.cur_scratch_regs = 0;
6159 return;
6160 }
6161
6162 /* A C expression for the size in bytes of the trampoline, as an integer.
6163 The template is:
6164
6165 setlo #0, <jmp_reg>
6166 setlo #0, <static_chain>
6167 sethi #0, <jmp_reg>
6168 sethi #0, <static_chain>
6169 jmpl @(gr0,<jmp_reg>) */
6170
6171 int
6172 frv_trampoline_size (void)
6173 {
6174 if (TARGET_FDPIC)
6175 /* Allocate room for the function descriptor and the lddi
6176 instruction. */
6177 return 8 + 6 * 4;
6178 return 5 /* instructions */ * 4 /* instruction size. */;
6179 }
6180
6181
6182 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6183 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6184 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6185 should be passed to the function when it is called.
6186
6187 The template is:
6188
6189 setlo #0, <jmp_reg>
6190 setlo #0, <static_chain>
6191 sethi #0, <jmp_reg>
6192 sethi #0, <static_chain>
6193 jmpl @(gr0,<jmp_reg>) */
6194
6195 static void
6196 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
6197 {
6198 rtx addr = XEXP (m_tramp, 0);
6199 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6200 rtx sc_reg = force_reg (Pmode, static_chain);
6201
6202 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6203 LCT_NORMAL, VOIDmode,
6204 addr, Pmode,
6205 GEN_INT (frv_trampoline_size ()), SImode,
6206 fnaddr, Pmode,
6207 sc_reg, Pmode);
6208 }
6209
6210
6211 /* Many machines have some registers that cannot be copied directly to or from
6212 memory or even from other types of registers. An example is the `MQ'
6213 register, which on most machines, can only be copied to or from general
6214 registers, but not memory. Some machines allow copying all registers to and
6215 from memory, but require a scratch register for stores to some memory
6216 locations (e.g., those with symbolic address on the RT, and those with
6217 certain symbolic address on the SPARC when compiling PIC). In some cases,
6218 both an intermediate and a scratch register are required.
6219
6220 You should define these macros to indicate to the reload phase that it may
6221 need to allocate at least one register for a reload in addition to the
6222 register to contain the data. Specifically, if copying X to a register
6223 RCLASS in MODE requires an intermediate register, you should define
6224 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6225 whose registers can be used as intermediate registers or scratch registers.
6226
6227 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6228 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6229 largest register class required. If the requirements for input and output
6230 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6231 instead of defining both macros identically.
6232
6233 The values returned by these macros are often `GENERAL_REGS'. Return
6234 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6235 to or from a register of RCLASS in MODE without requiring a scratch register.
6236 Do not define this macro if it would always return `NO_REGS'.
6237
6238 If a scratch register is required (either with or without an intermediate
6239 register), you should define patterns for `reload_inM' or `reload_outM', as
6240 required.  These patterns, which will normally be implemented with a
6241 `define_expand', should be similar to the `movM' patterns, except that
6242 operand 2 is the scratch register.
6243
6244 Define constraints for the reload register and scratch register that contain
6245 a single register class. If the original reload register (whose class is
6246 RCLASS) can meet the constraint given in the pattern, the value returned by
6247 these macros is used for the class of the scratch register. Otherwise, two
6248 additional reload registers are required. Their classes are obtained from
6249 the constraints in the insn pattern.
6250
6251 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6252 either be in a hard register or in memory. Use `true_regnum' to find out;
6253 it will return -1 if the pseudo is in memory and the hard register number if
6254 it is in a register.
6255
6256 These macros should not be used in the case where a particular class of
6257 registers can only be copied to memory and not to another class of
6258 registers. In that case, secondary reload registers are not needed and
6259 would not be helpful. Instead, a stack location must be used to perform the
6260 copy and the `movM' pattern should use memory as an intermediate storage.
6261 This case often occurs between floating-point and general registers. */
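
/* For instance (an illustrative reading of the switch below, not an
   exhaustive table): copying an accumulator or accumulator guard into a
   GPR reports FPR_REGS, because such values must be staged through a
   floating-point register; loading a nonzero constant into an FPR
   (QUAD_FPR_REGS) reports GPR_REGS; and the condition-code, CR and LR/LCR
   classes always need a GPR intermediate.  */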
6262
6263 enum reg_class
6264 frv_secondary_reload_class (enum reg_class rclass,
6265 machine_mode mode ATTRIBUTE_UNUSED,
6266 rtx x)
6267 {
6268 enum reg_class ret;
6269
6270 switch (rclass)
6271 {
6272 default:
6273 ret = NO_REGS;
6274 break;
6275
6276 /* Accumulators/Accumulator guard registers need to go through floating
6277 point registers. */
6278 case QUAD_REGS:
6279 case GPR_REGS:
6280 ret = NO_REGS;
6281 if (x && GET_CODE (x) == REG)
6282 {
6283 int regno = REGNO (x);
6284
6285 if (ACC_P (regno) || ACCG_P (regno))
6286 ret = FPR_REGS;
6287 }
6288 break;
6289
6290 /* Nonzero constants should be loaded into an FPR through a GPR. */
6291 case QUAD_FPR_REGS:
6292 if (x && CONSTANT_P (x) && !ZERO_P (x))
6293 ret = GPR_REGS;
6294 else
6295 ret = NO_REGS;
6296 break;
6297
6298 /* All of these types need gpr registers. */
6299 case ICC_REGS:
6300 case FCC_REGS:
6301 case CC_REGS:
6302 case ICR_REGS:
6303 case FCR_REGS:
6304 case CR_REGS:
6305 case LCR_REG:
6306 case LR_REG:
6307 ret = GPR_REGS;
6308 break;
6309
6310 /* The accumulators need fpr registers. */
6311 case QUAD_ACC_REGS:
6312 case ACCG_REGS:
6313 ret = FPR_REGS;
6314 break;
6315 }
6316
6317 return ret;
6318 }
6319
6320 /* This hook exists to catch the case where secondary_reload_class() is
6321 called from init_reg_autoinc() in regclass.c - before the reload optabs
6322 have been initialised. */
6323
6324 static reg_class_t
6325 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
6326 machine_mode reload_mode,
6327 secondary_reload_info * sri)
6328 {
6329 enum reg_class rclass = NO_REGS;
6330 enum reg_class reload_class = (enum reg_class) reload_class_i;
6331
6332 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6333 {
6334 sri->icode = sri->prev_sri->t_icode;
6335 return NO_REGS;
6336 }
6337
6338 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6339
6340 if (rclass != NO_REGS)
6341 {
6342 enum insn_code icode
6343 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
6344 reload_mode);
6345 if (icode == 0)
6346 {
6347 /* This happens when the reload_[in|out]_optabs have
6348 not been initialised. */
6349 sri->t_icode = CODE_FOR_nothing;
6350 return rclass;
6351 }
6352 }
6353
6354 /* Fall back to the default secondary reload handler. */
6355 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6356
6357 }
6358
6359 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */
6360
6361 static bool
6362 frv_class_likely_spilled_p (reg_class_t rclass)
6363 {
6364 switch (rclass)
6365 {
6366 default:
6367 break;
6368
6369 case GR8_REGS:
6370 case GR9_REGS:
6371 case GR89_REGS:
6372 case FDPIC_FPTR_REGS:
6373 case FDPIC_REGS:
6374 case ICC_REGS:
6375 case FCC_REGS:
6376 case CC_REGS:
6377 case ICR_REGS:
6378 case FCR_REGS:
6379 case CR_REGS:
6380 case LCR_REG:
6381 case LR_REG:
6382 case SPR_REGS:
6383 case QUAD_ACC_REGS:
6384 case ACCG_REGS:
6385 return true;
6386 }
6387
6388 return false;
6389 }
6390
6391
6392 /* An expression for the alignment of a structure field FIELD if the
6393 alignment computed in the usual way is COMPUTED. GCC uses this
6394 value instead of the value in `BIGGEST_ALIGNMENT' or
6395 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6396
6397 /* A bit-field is declared with type char, short, long or long long; its
6398 maximum width is the number of bits in that type.
6399
6400 Each bit-field is assigned to the lowest-addressed storage unit that is
6401 large enough to hold it.
6402
6403 Consecutive bit-fields of the same type are packed into consecutive bits
6404 of the same storage unit, beginning with the MSB and continuing
6405 toward the LSB.
6406
6407 If a field would straddle a boundary of its bit-field type, it is
6408 instead aligned to the next boundary suitable for that
6409 type.
6410
6411 A bit-field declared with a width of 0 is forcibly
6412 assigned to the next storage unit.
6413
6414 e.g.)
6415 struct {
6416 int a:2;
6417 int b:6;
6418 char c:4;
6419 int d:10;
6420 int :0;
6421 int f:2;
6422 } x;
6423
6424 +0 +1 +2 +3
6425 &x 00000000 00000000 00000000 00000000
6426 MLM----L
6427 a b
6428 &x+4 00000000 00000000 00000000 00000000
6429 M--L
6430 c
6431 &x+8 00000000 00000000 00000000 00000000
6432 M----------L
6433 d
6434 &x+12 00000000 00000000 00000000 00000000
6435 ML
6436 f
6437 */
6438
6439 int
6440 frv_adjust_field_align (tree field, int computed)
6441 {
6442 /* Make sure that the bitfield is not wider than the type. */
6443 if (field
6444 && DECL_BIT_FIELD (field)
6445 && !DECL_ARTIFICIAL (field))
6446 {
6447 tree parent = DECL_CONTEXT (field);
6448 tree prev = NULL_TREE;
6449 tree cur;
6450
6451 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur))
6452 {
6453 if (TREE_CODE (cur) != FIELD_DECL)
6454 continue;
6455
6456 prev = cur;
6457 }
6458
6459 gcc_assert (cur);
6460
6461 /* If this isn't a :0 field and if the previous element is a bitfield
6462 also, see if the type is different, if so, we will need to align the
6463 bit-field to the next boundary. */
6464 if (prev
6465 && ! DECL_PACKED (field)
6466 && ! integer_zerop (DECL_SIZE (field))
6467 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6468 {
6469 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6470 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6471 computed = (prev_align > cur_align) ? prev_align : cur_align;
6472 }
6473 }
6474
6475 return computed;
6476 }
6477
6478
6479 /* Implement TARGET_HARD_REGNO_MODE_OK. */
6480
6481 static bool
6482 frv_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
6483 {
6484 int base;
6485 int mask;
6486
6487 switch (mode)
6488 {
6489 case E_CCmode:
6490 case E_CC_UNSmode:
6491 case E_CC_NZmode:
6492 return ICC_P (regno) || GPR_P (regno);
6493
6494 case E_CC_CCRmode:
6495 return CR_P (regno) || GPR_P (regno);
6496
6497 case E_CC_FPmode:
6498 return FCC_P (regno) || GPR_P (regno);
6499
6500 default:
6501 break;
6502 }
6503
6504 /* Set BASE to the first register in REGNO's class. Set MASK to the
6505 bits that must be clear in (REGNO - BASE) for the register to be
6506 well-aligned. */
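  /* A worked example (the values follow directly from the arithmetic
     below): a DImode value (8 bytes) in the GPRs gives mask = 8/4 - 1 = 1,
     so only registers whose offset from GPR_FIRST is even are acceptable;
     the same 8 bytes spread over the one-byte ACCG registers gives
     mask = 7, restricting the start to offsets divisible by 8.  */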
6507 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6508 {
6509 if (ACCG_P (regno))
6510 {
6511 /* ACCGs store one byte. Two-byte quantities must start in
6512 even-numbered registers, four-byte ones in registers whose
6513 numbers are divisible by four, and so on. */
6514 base = ACCG_FIRST;
6515 mask = GET_MODE_SIZE (mode) - 1;
6516 }
6517 else
6518 {
6519 /* The other registers store one word. */
6520 if (GPR_P (regno) || regno == AP_FIRST)
6521 base = GPR_FIRST;
6522
6523 else if (FPR_P (regno))
6524 base = FPR_FIRST;
6525
6526 else if (ACC_P (regno))
6527 base = ACC_FIRST;
6528
6529 else if (SPR_P (regno))
6530 return mode == SImode;
6531
6532 /* Fill in the table. */
6533 else
6534 return false;
6535
6536 /* Anything smaller than an SI is OK in any word-sized register. */
6537 if (GET_MODE_SIZE (mode) < 4)
6538 return true;
6539
6540 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6541 }
6542 return (((regno - base) & mask) == 0);
6543 }
6544
6545 return false;
6546 }
6547
6548 /* Implement TARGET_MODES_TIEABLE_P. */
6549
6550 static bool
6551 frv_modes_tieable_p (machine_mode mode1, machine_mode mode2)
6552 {
6553 return mode1 == mode2;
6554 }
6555
6556
6557 /* Implement TARGET_HARD_REGNO_NREGS.
6558
6559 On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6560 that we can build the appropriate instructions to properly reload the
6561 values. Also, make the byte-sized accumulator guards use one guard
6562 for each byte. */
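
/* For example, with 4-byte words a DFmode value needs (8 + 3) / 4 = 2
   consecutive word-sized registers, while the same 8-byte value held in
   the byte-wide ACCG registers needs 8 consecutive guards.  */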
6563
6564 static unsigned int
6565 frv_hard_regno_nregs (unsigned int regno, machine_mode mode)
6566 {
6567 if (ACCG_P (regno))
6568 return GET_MODE_SIZE (mode);
6569 else
6570 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6571 }
6572
6573
6574 /* Implement CLASS_MAX_NREGS. */
6575
6576 int
6577 frv_class_max_nregs (enum reg_class rclass, machine_mode mode)
6578 {
6579 if (rclass == ACCG_REGS)
6580 /* An N-byte value requires N accumulator guards. */
6581 return GET_MODE_SIZE (mode);
6582 else
6583 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6584 }
6585
6586
6587 /* A C expression that is nonzero if X is a legitimate constant for an
6588 immediate operand on the target machine. You can assume that X satisfies
6589 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6590 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6591
6592 static bool
6593 frv_legitimate_constant_p (machine_mode mode, rtx x)
6594 {
6595 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6596 means that the move expanders will be expected to deal with most
6597 kinds of constant, regardless of what we return here.
6598
6599 However, among its other duties, frv_legitimate_constant_p decides whether
6600 a constant can be entered into reg_equiv_constant[]. If we return true,
6601 reload can create new instances of the constant whenever it likes.
6602
6603 The idea is therefore to accept as many constants as possible (to give
6604 reload more freedom) while rejecting constants that can only be created
6605 at certain times. In particular, anything with a symbolic component will
6606 require use of the pseudo FDPIC register, which is only available before
6607 reload. */
6608 if (TARGET_FDPIC)
6609 return LEGITIMATE_PIC_OPERAND_P (x);
6610
6611 /* All of the integer constants are ok. */
6612 if (GET_CODE (x) != CONST_DOUBLE)
6613 return TRUE;
6614
6615 /* double integer constants are ok. */
6616 if (GET_MODE (x) == VOIDmode || mode == DImode)
6617 return TRUE;
6618
6619 /* 0 is always ok. */
6620 if (x == CONST0_RTX (mode))
6621 return TRUE;
6622
6623 /* If floating point is just emulated, allow any constant, since it will be
6624 constructed in the GPRs. */
6625 if (!TARGET_HAS_FPRS)
6626 return TRUE;
6627
6628 if (mode == DFmode && !TARGET_DOUBLE)
6629 return TRUE;
6630
6631 /* Otherwise store the constant away and do a load. */
6632 return FALSE;
6633 }
6634
6635 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6636 CC_NZ for comparisons against zero in which a single Z or N flag test
6637 is enough, CC_UNS for other unsigned comparisons, and CC for other
6638 signed comparisons. */
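
/* Examples of the mapping: (lt:SI (reg) (const_int 0)) and equality tests
   against zero select CC_NZmode; (ltu:SI (reg) (reg)) selects CC_UNSmode;
   (gt:SI (reg) (reg)) falls through to plain CCmode; and any comparison of
   floating-point operands selects CC_FPmode.  */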
6639
6640 machine_mode
6641 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6642 {
6643 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6644 return CC_FPmode;
6645
6646 switch (code)
6647 {
6648 case EQ:
6649 case NE:
6650 case LT:
6651 case GE:
6652 return y == const0_rtx ? CC_NZmode : CCmode;
6653
6654 case GTU:
6655 case GEU:
6656 case LTU:
6657 case LEU:
6658 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6659
6660 default:
6661 return CCmode;
6662 }
6663 }
6664
6665
6666 /* Worker function for TARGET_REGISTER_MOVE_COST. */
6667
6668 #define HIGH_COST 40
6669 #define MEDIUM_COST 3
6670 #define LOW_COST 1
6671
6672 static int
6673 frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6674 reg_class_t from, reg_class_t to)
6675 {
6676 switch (from)
6677 {
6678 default:
6679 break;
6680
6681 case QUAD_REGS:
6682 case GPR_REGS:
6683 case GR8_REGS:
6684 case GR9_REGS:
6685 case GR89_REGS:
6686 case FDPIC_REGS:
6687 case FDPIC_FPTR_REGS:
6688 case FDPIC_CALL_REGS:
6689 switch (to)
6690 {
6691 default:
6692 break;
6693
6694 case QUAD_REGS:
6695 case GPR_REGS:
6696 case GR8_REGS:
6697 case GR9_REGS:
6698 case GR89_REGS:
6699 case FDPIC_REGS:
6700 case FDPIC_FPTR_REGS:
6701 case FDPIC_CALL_REGS:
6702 return LOW_COST;
6703
6704 case FPR_REGS:
6705 return LOW_COST;
6706
6707 case LCR_REG:
6708 case LR_REG:
6709 case SPR_REGS:
6710 return LOW_COST;
6711 }
6712 break;
6713
6714 case QUAD_FPR_REGS:
6715 switch (to)
6716 {
6717 default:
6718 break;
6719
6720 case QUAD_REGS:
6721 case GPR_REGS:
6722 case GR8_REGS:
6723 case GR9_REGS:
6724 case GR89_REGS:
6725 case FDPIC_REGS:
6726 case FDPIC_FPTR_REGS:
6727 case FDPIC_CALL_REGS:
6728
6729 case QUAD_ACC_REGS:
6730 case ACCG_REGS:
6731 return MEDIUM_COST;
6732
6733 case QUAD_FPR_REGS:
6734 return LOW_COST;
6735 }
6736 break;
6737
6738 case LCR_REG:
6739 case LR_REG:
6740 case SPR_REGS:
6741 switch (to)
6742 {
6743 default:
6744 break;
6745
6746 case QUAD_REGS:
6747 case GPR_REGS:
6748 case GR8_REGS:
6749 case GR9_REGS:
6750 case GR89_REGS:
6751 case FDPIC_REGS:
6752 case FDPIC_FPTR_REGS:
6753 case FDPIC_CALL_REGS:
6754 return MEDIUM_COST;
6755 }
6756 break;
6757
6758 case QUAD_ACC_REGS:
6759 case ACCG_REGS:
6760 switch (to)
6761 {
6762 default:
6763 break;
6764
6765 case QUAD_FPR_REGS:
6766 return MEDIUM_COST;
6767 }
6768 break;
6769 }
6770
6771 return HIGH_COST;
6772 }
6773
6774 /* Worker function for TARGET_MEMORY_MOVE_COST. */
6775
6776 static int
6777 frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
6778 reg_class_t rclass ATTRIBUTE_UNUSED,
6779 bool in ATTRIBUTE_UNUSED)
6780 {
6781 return 4;
6782 }
6783
6784
6785 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6786 use ".picptr" to generate safe relocations for PIC code. We also
6787 need a fixup entry for aligned (non-debugging) code. */
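
/* As a sketch of the expected output (symbol and label names illustrative):
   a word-sized function symbol "foo" under TARGET_FDPIC comes out as

       .picptr funcdesc(foo)

   while an aligned pointer in ordinary PIC code first gets a fixup record
   before the value itself:

       .LCP0:
       <FIXUP_SECTION_ASM_OP>
       .picptr .LCP0
       .previous
       .picptr foo  */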
6788
6789 static bool
6790 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6791 {
6792 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6793 {
6794 if (GET_CODE (value) == CONST
6795 || GET_CODE (value) == SYMBOL_REF
6796 || GET_CODE (value) == LABEL_REF)
6797 {
6798 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6799 && SYMBOL_REF_FUNCTION_P (value))
6800 {
6801 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6802 output_addr_const (asm_out_file, value);
6803 fputs (")\n", asm_out_file);
6804 return true;
6805 }
6806 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6807 && frv_function_symbol_referenced_p (value))
6808 return false;
6809 if (aligned_p && !TARGET_FDPIC)
6810 {
6811 static int label_num = 0;
6812 char buf[256];
6813 const char *p;
6814
6815 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
6816 p = (* targetm.strip_name_encoding) (buf);
6817
6818 fprintf (asm_out_file, "%s:\n", p);
6819 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
6820 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
6821 fprintf (asm_out_file, "\t.previous\n");
6822 }
6823 assemble_integer_with_op ("\t.picptr\t", value);
6824 return true;
6825 }
6826 if (!aligned_p)
6827 {
6828 /* We've set the unaligned SI op to NULL, so we always have to
6829 handle the unaligned case here. */
6830 assemble_integer_with_op ("\t.4byte\t", value);
6831 return true;
6832 }
6833 }
6834 return default_assemble_integer (value, size, aligned_p);
6835 }
6836
6837 /* Function to set up the backend function structure. */
6838
6839 static struct machine_function *
6840 frv_init_machine_status (void)
6841 {
6842 return ggc_cleared_alloc<machine_function> ();
6843 }
6844
6845 /* Implement TARGET_SCHED_ISSUE_RATE. */
6846
6847 int
6848 frv_issue_rate (void)
6849 {
6850 if (!TARGET_PACK)
6851 return 1;
6852
6853 switch (frv_cpu_type)
6854 {
6855 default:
6856 case FRV_CPU_FR300:
6857 case FRV_CPU_SIMPLE:
6858 return 1;
6859
6860 case FRV_CPU_FR400:
6861 case FRV_CPU_FR405:
6862 case FRV_CPU_FR450:
6863 return 2;
6864
6865 case FRV_CPU_GENERIC:
6866 case FRV_CPU_FR500:
6867 case FRV_CPU_TOMCAT:
6868 return 4;
6869
6870 case FRV_CPU_FR550:
6871 return 8;
6872 }
6873 }
6874
6875 /* Return the value of INSN's acc_group attribute. */
6876
6877 int
6878 frv_acc_group (rtx insn)
6879 {
6880 /* This distinction only applies to the FR550 packing constraints. */
6881 if (frv_cpu_type == FRV_CPU_FR550)
6882 {
6883 subrtx_iterator::array_type array;
6884 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
6885 if (REG_P (*iter))
6886 {
6887 unsigned int regno = REGNO (*iter);
6888 /* If REGNO refers to an accumulator, return ACC_GROUP_ODD if
6889 bit 2 of the register number is set and ACC_GROUP_EVEN if
6890 it is clear. */
6891 if (ACC_P (regno))
6892 return (regno - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6893 if (ACCG_P (regno))
6894 return (regno - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
6895 }
6896 }
6897 return ACC_GROUP_NONE;
6898 }
6899
6900 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
6901 INSN will try to claim first. Since this value depends only on the
6902 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
6903
6904 static unsigned int
6905 frv_insn_unit (rtx_insn *insn)
6906 {
6907 enum attr_type type;
6908
6909 type = get_attr_type (insn);
6910 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
6911 {
6912 /* We haven't seen this type of instruction before. */
6913 state_t state;
6914 unsigned int unit;
6915
6916 /* Issue the instruction on its own to see which unit it prefers. */
6917 state = alloca (state_size ());
6918 state_reset (state);
6919 state_transition (state, insn);
6920
6921 /* Find out which unit was taken. */
6922 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
6923 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
6924 break;
6925
6926 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
6927
6928 frv_type_to_unit[type] = unit;
6929 }
6930 return frv_type_to_unit[type];
6931 }
6932
6933 /* Return true if INSN issues to a branch unit. */
6934
6935 static bool
6936 frv_issues_to_branch_unit_p (rtx_insn *insn)
6937 {
6938 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
6939 }
6940
6941 /* The instructions in the packet, partitioned into groups. */
6942 struct frv_packet_group {
6943 /* How many instructions in the packet belong to this group. */
6944 unsigned int num_insns;
6945
6946 /* A list of the instructions that belong to this group, in the order
6947 they appear in the rtl stream. */
6948 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
6949
6950 /* The contents of INSNS after they have been sorted into the correct
6951 assembly-language order. Element X issues to unit X. The list may
6952 contain extra nops. */
6953 rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)];
6954
6955 /* The member of frv_nops[] to use in sorted[]. */
6956 rtx_insn *nop;
6957 };
6958
6959 /* The current state of the packing pass, implemented by frv_pack_insns. */
6960 static struct {
6961 /* The state of the pipeline DFA. */
6962 state_t dfa_state;
6963
6964 /* Which hardware registers are set within the current packet,
6965 and the conditions under which they are set. */
6966 regstate_t regstate[FIRST_PSEUDO_REGISTER];
6967
6968 /* The memory locations that have been modified so far in this
6969 packet. MEM is the memref and COND is the regstate_t condition
6970 under which it is set. */
6971 struct {
6972 rtx mem;
6973 regstate_t cond;
6974 } mems[2];
6975
6976 /* The number of valid entries in MEMS. The value is larger than
6977 ARRAY_SIZE (mems) if there were too many mems to record. */
6978 unsigned int num_mems;
6979
6980 /* The maximum number of instructions that can be packed together. */
6981 unsigned int issue_rate;
6982
6983 /* The instructions in the packet, partitioned into groups. */
6984 struct frv_packet_group groups[NUM_GROUPS];
6985
6986 /* The instructions that make up the current packet. */
6987 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)];
6988 unsigned int num_insns;
6989 } frv_packet;
6990
6991 /* Return the regstate_t flags for the given COND_EXEC condition.
6992 Abort if the condition isn't in the right form. */
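
/* For example (register name illustrative), the condition
   (ne (reg:CC_CCR cr3) (const_int 0)) maps to cr3's offset from CR_FIRST
   or'ed with REGSTATE_IF_TRUE, while an EQ test of the same register uses
   REGSTATE_IF_FALSE instead.  */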
6993
6994 static int
6995 frv_cond_flags (rtx cond)
6996 {
6997 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
6998 && GET_CODE (XEXP (cond, 0)) == REG
6999 && CR_P (REGNO (XEXP (cond, 0)))
7000 && XEXP (cond, 1) == const0_rtx);
7001 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7002 | (GET_CODE (cond) == NE
7003 ? REGSTATE_IF_TRUE
7004 : REGSTATE_IF_FALSE));
7005 }
7006
7007
7008 /* Return true if something accessed under condition COND2 can
7009 conflict with something written under condition COND1. */
7010
7011 static bool
7012 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7013 {
7014 /* If either reference was unconditional, we have a conflict. */
7015 if ((cond1 & REGSTATE_IF_EITHER) == 0
7016 || (cond2 & REGSTATE_IF_EITHER) == 0)
7017 return true;
7018
7019 /* The references might conflict if they were controlled by
7020 different CRs. */
7021 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7022 return true;
7023
7024 /* They definitely conflict if they are controlled by the
7025 same condition. */
7026 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7027 return true;
7028
7029 return false;
7030 }
7031
7032
7033 /* Return true if an instruction with pattern PAT depends on an
7034 instruction in the current packet. COND describes the condition
7035 under which PAT might be set or used. */
7036
7037 static bool
7038 frv_registers_conflict_p_1 (rtx pat, regstate_t cond)
7039 {
7040 subrtx_var_iterator::array_type array;
7041 FOR_EACH_SUBRTX_VAR (iter, array, pat, NONCONST)
7042 {
7043 rtx x = *iter;
7044 if (GET_CODE (x) == REG)
7045 {
7046 unsigned int regno;
7047 FOR_EACH_REGNO (regno, x)
7048 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7049 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7050 return true;
7051 }
7052 else if (GET_CODE (x) == MEM)
7053 {
7054 /* If we ran out of memory slots, assume a conflict. */
7055 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7056 return true;
7057
7058 /* Check for output or true dependencies with earlier MEMs. */
7059 for (unsigned int i = 0; i < frv_packet.num_mems; i++)
7060 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7061 {
7062 if (true_dependence (frv_packet.mems[i].mem, VOIDmode, x))
7063 return true;
7064
7065 if (output_dependence (frv_packet.mems[i].mem, x))
7066 return true;
7067 }
7068 }
7069
7070 /* The return values of calls aren't significant: they describe
7071 the effect of the call as a whole, not of the insn itself. */
7072 else if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
7073 iter.substitute (SET_SRC (x));
7074 }
7075 return false;
7076 }
7077
7078
7079 /* Return true if something in X might depend on an instruction
7080 in the current packet. */
7081
7082 static bool
7083 frv_registers_conflict_p (rtx x)
7084 {
7085 regstate_t flags;
7086
7087 flags = 0;
7088 if (GET_CODE (x) == COND_EXEC)
7089 {
7090 if (frv_registers_conflict_p_1 (XEXP (x, 0), flags))
7091 return true;
7092
7093 flags |= frv_cond_flags (XEXP (x, 0));
7094 x = XEXP (x, 1);
7095 }
7096 return frv_registers_conflict_p_1 (x, flags);
7097 }
7098
7099
7100 /* A note_stores callback. DATA points to the regstate_t condition
7101 under which X is modified. Update FRV_PACKET accordingly. */
7102
7103 static void
7104 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7105 {
7106 unsigned int regno;
7107
7108 if (GET_CODE (x) == REG)
7109 FOR_EACH_REGNO (regno, x)
7110 frv_packet.regstate[regno] |= *(regstate_t *) data;
7111
7112 if (GET_CODE (x) == MEM)
7113 {
7114 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7115 {
7116 frv_packet.mems[frv_packet.num_mems].mem = x;
7117 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7118 }
7119 frv_packet.num_mems++;
7120 }
7121 }
7122
7123
7124 /* Update the register state information for an instruction whose
7125 body is X. */
7126
7127 static void
7128 frv_registers_update (rtx x)
7129 {
7130 regstate_t flags;
7131
7132 flags = REGSTATE_MODIFIED;
7133 if (GET_CODE (x) == COND_EXEC)
7134 {
7135 flags |= frv_cond_flags (XEXP (x, 0));
7136 x = XEXP (x, 1);
7137 }
7138 note_pattern_stores (x, frv_registers_update_1, &flags);
7139 }
7140
7141
7142 /* Initialize frv_packet for the start of a new packet. */
7143
7144 static void
7145 frv_start_packet (void)
7146 {
7147 enum frv_insn_group group;
7148
7149 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7150 frv_packet.num_mems = 0;
7151 frv_packet.num_insns = 0;
7152 for (group = GROUP_I; group < NUM_GROUPS;
7153 group = (enum frv_insn_group) (group + 1))
7154 frv_packet.groups[group].num_insns = 0;
7155 }
7156
7157
7158 /* Likewise for the start of a new basic block. */
7159
7160 static void
7161 frv_start_packet_block (void)
7162 {
7163 state_reset (frv_packet.dfa_state);
7164 frv_start_packet ();
7165 }
7166
7167
7168 /* Finish the current packet, if any, and start a new one. Call
7169 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7170
7171 static void
7172 frv_finish_packet (void (*handle_packet) (void))
7173 {
7174 if (frv_packet.num_insns > 0)
7175 {
7176 handle_packet ();
7177 state_transition (frv_packet.dfa_state, 0);
7178 frv_start_packet ();
7179 }
7180 }
7181
7182
7183 /* Return true if INSN can be added to the current packet. Update
7184 the DFA state on success. */
7185
7186 static bool
7187 frv_pack_insn_p (rtx_insn *insn)
7188 {
7189 /* See if the packet is already as long as it can be. */
7190 if (frv_packet.num_insns == frv_packet.issue_rate)
7191 return false;
7192
7193 /* If the scheduler thought that an instruction should start a packet,
7194 it's usually a good idea to believe it. It knows much more about
7195 the latencies than we do.
7196
7197 There are some exceptions though:
7198
7199 - Conditional instructions are scheduled on the assumption that
7200 they will be executed. This is usually a good thing, since it
7201 tends to avoid unnecessary stalls in the conditional code.
7202 But we want to pack conditional instructions as tightly as
7203 possible, in order to optimize the case where they aren't
7204 executed.
7205
7206 - The scheduler will always put branches on their own, even
7207 if there's no real dependency.
7208
7209 - There's no point putting a call in its own packet unless
7210 we have to. */
7211 if (frv_packet.num_insns > 0
7212 && NONJUMP_INSN_P (insn)
7213 && GET_MODE (insn) == TImode
7214 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7215 return false;
7216
7217 /* Check for register conflicts. Don't do this for setlo since any
7218 conflict will be with the partnering sethi, with which it can
7219 be packed. */
7220 if (get_attr_type (insn) != TYPE_SETLO)
7221 if (frv_registers_conflict_p (PATTERN (insn)))
7222 return false;
7223
7224 return state_transition (frv_packet.dfa_state, insn) < 0;
7225 }
7226
7227
7228 /* Add instruction INSN to the current packet. */
7229
7230 static void
7231 frv_add_insn_to_packet (rtx_insn *insn)
7232 {
7233 struct frv_packet_group *packet_group;
7234
7235 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7236 packet_group->insns[packet_group->num_insns++] = insn;
7237 frv_packet.insns[frv_packet.num_insns++] = insn;
7238
7239 frv_registers_update (PATTERN (insn));
7240 }
7241
7242
7243 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7244 packet ends in a branch or call, insert the nop before it, otherwise
7245 add to the end. */
7246
7247 static void
7248 frv_insert_nop_in_packet (rtx_insn *insn)
7249 {
7250 struct frv_packet_group *packet_group;
7251 rtx_insn *last;
7252
7253 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7254 last = frv_packet.insns[frv_packet.num_insns - 1];
7255 if (! NONJUMP_INSN_P (last))
7256 {
7257 insn = emit_insn_before (PATTERN (insn), last);
7258 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7259 frv_packet.insns[frv_packet.num_insns++] = last;
7260 }
7261 else
7262 {
7263 insn = emit_insn_after (PATTERN (insn), last);
7264 frv_packet.insns[frv_packet.num_insns++] = insn;
7265 }
7266 packet_group->insns[packet_group->num_insns++] = insn;
7267 }
7268
7269
7270 /* If packing is enabled, divide the instructions into packets and
7271 return true. Call HANDLE_PACKET for each complete packet. */
7272
7273 static bool
7274 frv_for_each_packet (void (*handle_packet) (void))
7275 {
7276 rtx_insn *insn, *next_insn;
7277
7278 frv_packet.issue_rate = frv_issue_rate ();
7279
7280 /* Early exit if we don't want to pack insns. */
7281 if (!optimize
7282 || !flag_schedule_insns_after_reload
7283 || !TARGET_VLIW_BRANCH
7284 || frv_packet.issue_rate == 1)
7285 return false;
7286
7287 /* Set up the initial packing state. */
7288 dfa_start ();
7289 frv_packet.dfa_state = alloca (state_size ());
7290
7291 frv_start_packet_block ();
7292 for (insn = get_insns (); insn != 0; insn = next_insn)
7293 {
7294 enum rtx_code code;
7295 bool eh_insn_p;
7296
7297 code = GET_CODE (insn);
7298 next_insn = NEXT_INSN (insn);
7299
7300 if (code == CODE_LABEL)
7301 {
7302 frv_finish_packet (handle_packet);
7303 frv_start_packet_block ();
7304 }
7305
7306 if (INSN_P (insn))
7307 switch (GET_CODE (PATTERN (insn)))
7308 {
7309 case USE:
7310 case CLOBBER:
7311 break;
7312
7313 default:
7314 /* Calls mustn't be packed on a TOMCAT. */
7315 if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
7316 frv_finish_packet (handle_packet);
7317
7318 /* Since the last instruction in a packet determines the EH
7319 region, any exception-throwing instruction must come at
7320 the end of reordered packet. Insns that issue to a
7321 branch unit are bound to come last; for others it's
7322 too hard to predict. */
7323 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7324 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7325 frv_finish_packet (handle_packet);
7326
7327 /* Finish the current packet if we can't add INSN to it.
7328 Simulate cycles until INSN is ready to issue. */
7329 if (!frv_pack_insn_p (insn))
7330 {
7331 frv_finish_packet (handle_packet);
7332 while (!frv_pack_insn_p (insn))
7333 state_transition (frv_packet.dfa_state, 0);
7334 }
7335
7336 /* Add the instruction to the packet. */
7337 frv_add_insn_to_packet (insn);
7338
7339 /* Calls and jumps end a packet, as do insns that throw
7340 an exception. */
7341 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7342 frv_finish_packet (handle_packet);
7343 break;
7344 }
7345 }
7346 frv_finish_packet (handle_packet);
7347 dfa_finish ();
7348 return true;
7349 }
7350
7351 /* Subroutine of frv_sort_insn_group. We are trying to sort
7352 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7353 language order. We have already picked a new position for
7354 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7355 These instructions will occupy elements [0, LOWER_SLOT) and
7356 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7357 the DFA state after issuing these instructions.
7358
7359 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7360 of the unused instructions. Return true if one such permutation gives
7361 a valid ordering, leaving the successful permutation in sorted[].
7362 Do not modify sorted[] until a valid permutation is found. */
7363
7364 static bool
7365 frv_sort_insn_group_1 (enum frv_insn_group group,
7366 unsigned int lower_slot, unsigned int upper_slot,
7367 unsigned int issued, unsigned int num_insns,
7368 state_t state)
7369 {
7370 struct frv_packet_group *packet_group;
7371 unsigned int i;
7372 state_t test_state;
7373 size_t dfa_size;
7374 rtx_insn *insn;
7375
7376 /* Early success if we've filled all the slots. */
7377 if (lower_slot == upper_slot)
7378 return true;
7379
7380 packet_group = &frv_packet.groups[group];
7381 dfa_size = state_size ();
7382 test_state = alloca (dfa_size);
7383
7384 /* Try issuing each unused instruction. */
7385 for (i = num_insns - 1; i + 1 != 0; i--)
7386 if (~issued & (1 << i))
7387 {
7388 insn = packet_group->sorted[i];
7389 memcpy (test_state, state, dfa_size);
7390 if (state_transition (test_state, insn) < 0
7391 && cpu_unit_reservation_p (test_state,
7392 NTH_UNIT (group, upper_slot - 1))
7393 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7394 issued | (1 << i), num_insns,
7395 test_state))
7396 {
7397 packet_group->sorted[upper_slot - 1] = insn;
7398 return true;
7399 }
7400 }
7401
7402 return false;
7403 }
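
/* Added walkthrough (illustrative only): with NUM_INSNS == 3 and nothing
   issued yet, the top-level call

     frv_sort_insn_group_1 (group, 0, 3, 0, 3, state);

   tries each remaining insn in slot 2.  For every candidate that the DFA
   accepts on NTH_UNIT (group, 2) it recurses with UPPER_SLOT == 2 and the
   candidate's bit set in ISSUED, and so on until LOWER_SLOT == UPPER_SLOT.
   sorted[] is only written on the way back out of a successful recursion,
   so a failed branch of the search leaves it untouched.  */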
7404
7405 /* Compare two instructions by their frv_insn_unit. */
7406
7407 static int
7408 frv_compare_insns (const void *first, const void *second)
7409 {
7410 rtx_insn * const *insn1 = (rtx_insn * const *) first;
7411 rtx_insn * const *insn2 = (rtx_insn * const *) second;
7412 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7413 }
7414
7415 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7416 and sort it into assembly language order. See frv.md for a description of
7417 the algorithm. */
7418
7419 static void
7420 frv_sort_insn_group (enum frv_insn_group group)
7421 {
7422 struct frv_packet_group *packet_group;
7423 unsigned int first, i, nop, max_unit, num_slots;
7424 state_t state, test_state;
7425 size_t dfa_size;
7426
7427 packet_group = &frv_packet.groups[group];
7428
7429 /* Assume no nop is needed. */
7430 packet_group->nop = 0;
7431
7432 if (packet_group->num_insns == 0)
7433 return;
7434
7435 /* Copy insns[] to sorted[]. */
7436 memcpy (packet_group->sorted, packet_group->insns,
7437 sizeof (rtx) * packet_group->num_insns);
7438
7439 /* Sort sorted[] by the unit that each insn tries to take first. */
7440 if (packet_group->num_insns > 1)
7441 qsort (packet_group->sorted, packet_group->num_insns,
7442 sizeof (rtx), frv_compare_insns);
7443
7444 /* That's always enough for branch and control insns. */
7445 if (group == GROUP_B || group == GROUP_C)
7446 return;
7447
7448 dfa_size = state_size ();
7449 state = alloca (dfa_size);
7450 test_state = alloca (dfa_size);
7451
7452 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7453 consecutively and such that the DFA takes unit X when sorted[X]
7454 is added. Set STATE to the new DFA state. */
7455 state_reset (test_state);
7456 for (first = 0; first < packet_group->num_insns; first++)
7457 {
7458 memcpy (state, test_state, dfa_size);
7459 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7460 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7461 break;
7462 }
7463
7464 /* If all the instructions issued in ascending order, we're done. */
7465 if (first == packet_group->num_insns)
7466 return;
7467
7468 /* Add nops to the end of sorted[] and try each permutation until
7469 we find one that works. */
7470 for (nop = 0; nop < frv_num_nops; nop++)
7471 {
7472 max_unit = frv_insn_unit (frv_nops[nop]);
7473 if (frv_unit_groups[max_unit] == group)
7474 {
7475 packet_group->nop = frv_nops[nop];
7476 num_slots = UNIT_NUMBER (max_unit) + 1;
7477 for (i = packet_group->num_insns; i < num_slots; i++)
7478 packet_group->sorted[i] = frv_nops[nop];
7479 if (frv_sort_insn_group_1 (group, first, num_slots,
7480 (1 << first) - 1, num_slots, state))
7481 return;
7482 }
7483 }
7484 gcc_unreachable ();
7485 }
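
/* Added worked example (slot counts depend on the nop's DFA unit): suppose
   the integer group holds two insns, one that can only issue to I0 and one
   that can only issue to I2.  The ascending scan above stops at FIRST == 1,
   so the trailing slots up to the nop's unit number are seeded with integer
   nops and frv_sort_insn_group_1 is asked to find an ordering such as
   { insn-for-I0, nop, insn-for-I2, ... } that the DFA accepts.  */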
7486
7487 /* Sort the current packet into assembly-language order. Set packing
7488 flags as appropriate. */
7489
7490 static void
7491 frv_reorder_packet (void)
7492 {
7493 unsigned int cursor[NUM_GROUPS];
7494 rtx_insn *insns[ARRAY_SIZE (frv_unit_groups)];
7495 unsigned int unit, to, from;
7496 enum frv_insn_group group;
7497 struct frv_packet_group *packet_group;
7498
7499 /* First sort each group individually. */
7500 for (group = GROUP_I; group < NUM_GROUPS;
7501 group = (enum frv_insn_group) (group + 1))
7502 {
7503 cursor[group] = 0;
7504 frv_sort_insn_group (group);
7505 }
7506
7507 /* Go through the unit template and try to add an instruction from
7508 that unit's group. */
7509 to = 0;
7510 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7511 {
7512 group = frv_unit_groups[unit];
7513 packet_group = &frv_packet.groups[group];
7514 if (cursor[group] < packet_group->num_insns)
7515 {
7516 /* frv_reorg should have added nops for us. */
7517 gcc_assert (packet_group->sorted[cursor[group]]
7518 != packet_group->nop);
7519 insns[to++] = packet_group->sorted[cursor[group]++];
7520 }
7521 }
7522
7523 gcc_assert (to == frv_packet.num_insns);
7524
7525 /* Clear the last instruction's packing flag, thus marking the end of
7526 a packet. Reorder the other instructions relative to it. */
7527 CLEAR_PACKING_FLAG (insns[to - 1]);
7528 for (from = 0; from < to - 1; from++)
7529 {
7530 remove_insn (insns[from]);
7531 add_insn_before (insns[from], insns[to - 1], NULL);
7532 SET_PACKING_FLAG (insns[from]);
7533 }
7534 }
7535
7536
7537 /* Divide instructions into packets. Reorder the contents of each
7538 packet so that they are in the correct assembly-language order.
7539
7540 Since this pass can change the raw meaning of the rtl stream, it must
7541 only be called at the last minute, just before the instructions are
7542 written out. */
7543
7544 static void
7545 frv_pack_insns (void)
7546 {
7547 if (frv_for_each_packet (frv_reorder_packet))
7548 frv_insn_packing_flag = 0;
7549 else
7550 frv_insn_packing_flag = -1;
7551 }
7552
7553 /* See whether we need to add nops to group GROUP in order to
7554 make a valid packet. */
7555
7556 static void
7557 frv_fill_unused_units (enum frv_insn_group group)
7558 {
7559 unsigned int non_nops, nops, i;
7560 struct frv_packet_group *packet_group;
7561
7562 packet_group = &frv_packet.groups[group];
7563
7564 /* Sort the instructions into assembly-language order.
7565 Use nops to fill slots that are otherwise unused. */
7566 frv_sort_insn_group (group);
7567
7568 /* See how many nops are needed before the final useful instruction. */
7569 i = nops = 0;
7570 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7571 while (packet_group->sorted[i++] == packet_group->nop)
7572 nops++;
7573
7574 /* Insert that many nops into the instruction stream. */
7575 while (nops-- > 0)
7576 frv_insert_nop_in_packet (packet_group->nop);
7577 }
7578
7579 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7580
7581 static bool
7582 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7583 {
7584 if (io1->const_address != 0 && io2->const_address != 0)
7585 return io1->const_address == io2->const_address;
7586
7587 if (io1->var_address != 0 && io2->var_address != 0)
7588 return rtx_equal_p (io1->var_address, io2->var_address);
7589
7590 return false;
7591 }
7592
7593 /* Return true if operations IO1 and IO2 are guaranteed to complete
7594 in order. */
7595
7596 static bool
7597 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7598 {
7599 /* The order of writes is always preserved. */
7600 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7601 return true;
7602
7603 /* The order of reads isn't preserved. */
7604 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7605 return false;
7606
7607 /* One operation is a write and the other is (or could be) a read.
7608 The order is only guaranteed if the accesses are to the same
7609 doubleword. */
7610 return frv_same_doubleword_p (io1, io2);
7611 }
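
/* Added summary of the cases above:

     io1 \ io2 | WRITE                  | READ or UNKNOWN
     ----------+------------------------+------------------------
     WRITE     | always ordered         | ordered iff same dword
     READ/UNK  | ordered iff same dword | not ordered             */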
7612
7613 /* Generalize I/O operation X so that it covers both X and Y. */
7614
7615 static void
7616 frv_io_union (struct frv_io *x, const struct frv_io *y)
7617 {
7618 if (x->type != y->type)
7619 x->type = FRV_IO_UNKNOWN;
7620 if (!frv_same_doubleword_p (x, y))
7621 {
7622 x->const_address = 0;
7623 x->var_address = 0;
7624 }
7625 }
7626
7627 /* Fill IO with information about the load or store associated with
7628 membar instruction INSN. */
7629
7630 static void
7631 frv_extract_membar (struct frv_io *io, rtx_insn *insn)
7632 {
7633 extract_insn (insn);
7634 io->type = (enum frv_io_type) INTVAL (recog_data.operand[2]);
7635 io->const_address = INTVAL (recog_data.operand[1]);
7636 io->var_address = XEXP (recog_data.operand[0], 0);
7637 }
7638
7639 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7640 if X is a register and *DATA depends on X. */
7641
7642 static void
7643 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7644 {
7645 rtx *other = (rtx *) data;
7646
7647 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7648 *other = 0;
7649 }
7650
7651 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7652 Remove every modified register from the set. */
7653
7654 static void
7655 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7656 {
7657 HARD_REG_SET *set = (HARD_REG_SET *) data;
7658 unsigned int regno;
7659
7660 if (REG_P (x))
7661 FOR_EACH_REGNO (regno, x)
7662 CLEAR_HARD_REG_BIT (*set, regno);
7663 }
7664
7665 /* A note_uses callback that adds all registers in *X to hard register
7666 set *DATA. */
7667
7668 static void
7669 frv_io_handle_use (rtx *x, void *data)
7670 {
7671 find_all_hard_regs (*x, (HARD_REG_SET *) data);
7672 }
7673
7674 /* Go through block BB looking for membars to remove. There are two
7675 cases where intra-block analysis is enough:
7676
7677 - a membar is redundant if it occurs between two consecutive I/O
7678 operations and if those operations are guaranteed to complete
7679 in order.
7680
7681 - a membar for a __builtin_read is redundant if the result is
7682 used before the next I/O operation is issued.
7683
7684 If the last membar in the block could not be removed, and there
7685 are guaranteed to be no I/O operations between that membar and
7686 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7687 store null.
7688
7689 Describe the block's first I/O operation in *NEXT_IO. Describe
7690 an unknown operation if the block doesn't do any I/O. */
7691
7692 static void
7693 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7694 rtx_insn **last_membar)
7695 {
7696 HARD_REG_SET used_regs;
7697 rtx set;
7698 rtx_insn *insn, *next_membar;
7699 bool next_is_end_p;
7700
7701 /* NEXT_IO is the next I/O operation to be performed after the current
7702 instruction. It starts off as being an unknown operation. */
7703 memset (next_io, 0, sizeof (*next_io));
7704
7705 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7706 next_is_end_p = true;
7707
7708 /* If the current instruction is a __builtin_read or __builtin_write,
7709 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7710 is null if the membar has already been deleted.
7711
7712 Note that the initialization here should only be needed to
7713 suppress warnings. */
7714 next_membar = 0;
7715
7716 /* USED_REGS is the set of registers that are used before the
7717 next I/O instruction. */
7718 CLEAR_HARD_REG_SET (used_regs);
7719
7720 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7721 if (CALL_P (insn))
7722 {
7723 /* We can't predict what a call will do to volatile memory. */
7724 memset (next_io, 0, sizeof (struct frv_io));
7725 next_is_end_p = false;
7726 CLEAR_HARD_REG_SET (used_regs);
7727 }
7728 else if (INSN_P (insn))
7729 switch (recog_memoized (insn))
7730 {
7731 case CODE_FOR_optional_membar_qi:
7732 case CODE_FOR_optional_membar_hi:
7733 case CODE_FOR_optional_membar_si:
7734 case CODE_FOR_optional_membar_di:
7735 next_membar = insn;
7736 if (next_is_end_p)
7737 {
7738 /* Local information isn't enough to decide whether this
7739 membar is needed. Stash it away for later. */
7740 *last_membar = insn;
7741 frv_extract_membar (next_io, insn);
7742 next_is_end_p = false;
7743 }
7744 else
7745 {
7746 /* Check whether the I/O operation before INSN could be
7747 reordered with one described by NEXT_IO. If it can't,
7748 INSN will not be needed. */
7749 struct frv_io prev_io;
7750
7751 frv_extract_membar (&prev_io, insn);
7752 if (frv_io_fixed_order_p (&prev_io, next_io))
7753 {
7754 if (dump_file)
7755 fprintf (dump_file,
7756 ";; [Local] Removing membar %d since order"
7757 " of accesses is guaranteed\n",
7758 INSN_UID (next_membar));
7759
7760 insn = NEXT_INSN (insn);
7761 delete_insn (next_membar);
7762 next_membar = 0;
7763 }
7764 *next_io = prev_io;
7765 }
7766 break;
7767
7768 default:
7769 /* Invalidate NEXT_IO's address if it depends on something that
7770 is clobbered by INSN. */
7771 if (next_io->var_address)
7772 note_stores (insn, frv_io_check_address, &next_io->var_address);
7773
7774 /* If the next membar is associated with a __builtin_read,
7775 see if INSN reads from that address. If it does, and if
7776 the destination register is used before the next I/O access,
7777 there is no need for the membar. */
7778 set = PATTERN (insn);
7779 if (next_io->type == FRV_IO_READ
7780 && next_io->var_address != 0
7781 && next_membar != 0
7782 && GET_CODE (set) == SET
7783 && GET_CODE (SET_DEST (set)) == REG
7784 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7785 {
7786 rtx src;
7787
7788 src = SET_SRC (set);
7789 if (GET_CODE (src) == ZERO_EXTEND)
7790 src = XEXP (src, 0);
7791
7792 if (GET_CODE (src) == MEM
7793 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
7794 {
7795 if (dump_file)
7796 fprintf (dump_file,
7797 ";; [Local] Removing membar %d since the target"
7798 " of %d is used before the I/O operation\n",
7799 INSN_UID (next_membar), INSN_UID (insn));
7800
7801 if (next_membar == *last_membar)
7802 *last_membar = 0;
7803
7804 delete_insn (next_membar);
7805 next_membar = 0;
7806 }
7807 }
7808
7809 /* If INSN has volatile references, forget about any registers
7810 that are used after it. Otherwise forget about uses that
7811 are (or might be) defined by INSN. */
7812 if (volatile_refs_p (PATTERN (insn)))
7813 CLEAR_HARD_REG_SET (used_regs);
7814 else
7815 note_stores (insn, frv_io_handle_set, &used_regs);
7816
7817 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
7818 break;
7819 }
7820 }
7821
7822 /* See if MEMBAR, the last membar instruction in BB, can be removed.
7823 FIRST_IO[X] describes the first operation performed by basic block X. */
7824
7825 static void
7826 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
7827 rtx_insn *membar)
7828 {
7829 struct frv_io this_io, next_io;
7830 edge succ;
7831 edge_iterator ei;
7832
7833 /* We need to keep the membar if there is an edge to the exit block. */
7834 FOR_EACH_EDGE (succ, ei, bb->succs)
7835 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
7836 if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7837 return;
7838
7839 /* Work out the union of all successor blocks. */
7840 ei = ei_start (bb->succs);
7841 ei_cond (ei, &succ);
7842 /* next_io = first_io[bb->succ->dest->index]; */
7843 next_io = first_io[succ->dest->index];
7844 ei = ei_start (bb->succs);
7845 if (ei_cond (ei, &succ))
7846 {
7847 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
7848 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
7849 frv_io_union (&next_io, &first_io[succ->dest->index]);
7850 }
7851 else
7852 gcc_unreachable ();
7853
7854 frv_extract_membar (&this_io, membar);
7855 if (frv_io_fixed_order_p (&this_io, &next_io))
7856 {
7857 if (dump_file)
7858 fprintf (dump_file,
7859 ";; [Global] Removing membar %d since order of accesses"
7860 " is guaranteed\n", INSN_UID (membar));
7861
7862 delete_insn (membar);
7863 }
7864 }
7865
7866 /* Remove redundant membars from the current function. */
7867
7868 static void
7869 frv_optimize_membar (void)
7870 {
7871 basic_block bb;
7872 struct frv_io *first_io;
7873 rtx_insn **last_membar;
7874
7875 compute_bb_for_insn ();
7876 first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
7877 last_membar = XCNEWVEC (rtx_insn *, last_basic_block_for_fn (cfun));
7878
7879 FOR_EACH_BB_FN (bb, cfun)
7880 frv_optimize_membar_local (bb, &first_io[bb->index],
7881 &last_membar[bb->index]);
7882
7883 FOR_EACH_BB_FN (bb, cfun)
7884 if (last_membar[bb->index] != 0)
7885 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
7886
7887 free (first_io);
7888 free (last_membar);
7889 }
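
/* Added example of the local case (user code is illustrative only):

     __builtin_write32 (port, a);
     __builtin_write32 (port, b);

   Both writes expand to a volatile store followed by an optional membar.
   Because the order of two writes is always preserved
   (frv_io_fixed_order_p), frv_optimize_membar_local deletes the first
   membar; the second one is recorded in last_membar[] and left for the
   global pass to judge.  */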
7890
7891 /* Used by frv_reorg to keep track of the current packet's address. */
7892 static unsigned int frv_packet_address;
7893
7894 /* If the current packet falls through to a label, try to pad the packet
7895 with nops in order to fit the label's alignment requirements. */
7896
7897 static void
7898 frv_align_label (void)
7899 {
7900 unsigned int alignment, target, nop;
7901 rtx_insn *x, *last, *barrier, *label;
7902
7903 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
7904 maximum alignment of that packet, LABEL to the last label between
7905 the packets, and BARRIER to the last barrier. */
7906 last = frv_packet.insns[frv_packet.num_insns - 1];
7907 label = barrier = 0;
7908 alignment = 4;
7909 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
7910 {
7911 if (LABEL_P (x))
7912 {
7913 unsigned int subalign = 1 << label_to_alignment (x).levels[0].log;
7914 alignment = MAX (alignment, subalign);
7915 label = x;
7916 }
7917 if (BARRIER_P (x))
7918 barrier = x;
7919 }
7920
7921 /* If -malign-labels, and the packet falls through to an unaligned
7922 label, try introducing a nop to align that label to 8 bytes. */
7923 if (TARGET_ALIGN_LABELS
7924 && label != 0
7925 && barrier == 0
7926 && frv_packet.num_insns < frv_packet.issue_rate)
7927 alignment = MAX (alignment, 8);
7928
7929 /* Advance the address to the end of the current packet. */
7930 frv_packet_address += frv_packet.num_insns * 4;
7931
7932 /* Work out the target address, after alignment. */
7933 target = (frv_packet_address + alignment - 1) & -alignment;
7934
7935 /* If the packet falls through to the label, try to find an efficient
7936 padding sequence. */
7937 if (barrier == 0)
7938 {
7939 /* First try adding nops to the current packet. */
7940 for (nop = 0; nop < frv_num_nops; nop++)
7941 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
7942 {
7943 frv_insert_nop_in_packet (frv_nops[nop]);
7944 frv_packet_address += 4;
7945 }
7946
7947 /* If we still haven't reached the target, add some new packets that
7948 contain only nops. If there are two types of nop, insert an
7949 alternating sequence of frv_nops[0] and frv_nops[1], which will
7950 lead to packets like:
7951
7952 nop.p
7953 mnop.p/fnop.p
7954 nop.p
7955 mnop/fnop
7956
7957 etc. Just emit frv_nops[0] if that's the only nop we have. */
7958 last = frv_packet.insns[frv_packet.num_insns - 1];
7959 nop = 0;
7960 while (frv_packet_address < target)
7961 {
7962 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
7963 frv_packet_address += 4;
7964 if (frv_num_nops > 1)
7965 nop ^= 1;
7966 }
7967 }
7968
7969 frv_packet_address = target;
7970 }
7971
7972 /* Subroutine of frv_reorg, called after each packet has been constructed
7973 in frv_packet. */
7974
7975 static void
7976 frv_reorg_packet (void)
7977 {
7978 frv_fill_unused_units (GROUP_I);
7979 frv_fill_unused_units (GROUP_FM);
7980 frv_align_label ();
7981 }
7982
7983 /* Add an instruction with pattern NOP to frv_nops[]. */
7984
7985 static void
7986 frv_register_nop (rtx nop)
7987 {
7988 rtx_insn *nop_insn = make_insn_raw (nop);
7989 SET_NEXT_INSN (nop_insn) = 0;
7990 SET_PREV_INSN (nop_insn) = 0;
7991 frv_nops[frv_num_nops++] = nop_insn;
7992 }
7993
7994 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
7995 into packets and check whether we need to insert nops in order to
7996 fulfill the processor's issue requirements. Also, if the user has
7997 requested a certain alignment for a label, try to meet that alignment
7998 by inserting nops in the previous packet. */
7999
8000 static void
8001 frv_reorg (void)
8002 {
8003 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8004 frv_optimize_membar ();
8005
8006 frv_num_nops = 0;
8007 frv_register_nop (gen_nop ());
8008 if (TARGET_MEDIA)
8009 frv_register_nop (gen_mnop ());
8010 if (TARGET_HARD_FLOAT)
8011 frv_register_nop (gen_fnop ());
8012
8013 /* Estimate the length of each branch. Although this may change after
8014 we've inserted nops, it will only do so in big functions. */
8015 shorten_branches (get_insns ());
8016
8017 frv_packet_address = 0;
8018 frv_for_each_packet (frv_reorg_packet);
8019 }
8020
8021 #define def_builtin(name, type, code) \
8022 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
8023
8024 struct builtin_description
8025 {
8026 enum insn_code icode;
8027 const char *name;
8028 enum frv_builtins code;
8029 enum rtx_code comparison;
8030 unsigned int flag;
8031 };
8032
8033 /* Media intrinsics that take a single, constant argument. */
8034
8035 static struct builtin_description bdesc_set[] =
8036 {
8037 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, UNKNOWN, 0 }
8038 };
8039
8040 /* Media intrinsics that take just one argument. */
8041
8042 static struct builtin_description bdesc_1arg[] =
8043 {
8044 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, UNKNOWN, 0 },
8045 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, UNKNOWN, 0 },
8046 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, UNKNOWN, 0 },
8047 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, UNKNOWN, 0},
8048 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, UNKNOWN, 0 },
8049 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, UNKNOWN, 0 }
8050 };
8051
8052 /* Media intrinsics that take two arguments. */
8053
8054 static struct builtin_description bdesc_2arg[] =
8055 {
8056 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, UNKNOWN, 0},
8057 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, UNKNOWN, 0},
8058 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, UNKNOWN, 0},
8059 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, UNKNOWN, 0},
8060 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, UNKNOWN, 0},
8061 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, UNKNOWN, 0},
8062 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, UNKNOWN, 0},
8063 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, UNKNOWN, 0},
8064 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, UNKNOWN, 0},
8065 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, UNKNOWN, 0},
8066 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, UNKNOWN, 0},
8067 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, UNKNOWN, 0},
8068 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, UNKNOWN, 0},
8069 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, UNKNOWN, 0},
8070 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, UNKNOWN, 0},
8071 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, UNKNOWN, 0},
8072 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, UNKNOWN, 0},
8073 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, UNKNOWN, 0},
8074 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, UNKNOWN, 0},
8075 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, UNKNOWN, 0},
8076 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, UNKNOWN, 0},
8077 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, UNKNOWN, 0},
8078 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, UNKNOWN, 0},
8079 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, UNKNOWN, 0},
8080 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, UNKNOWN, 0},
8081 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, UNKNOWN, 0},
8082 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, UNKNOWN, 0}
8083 };
8084
8085 /* Integer intrinsics that take two arguments and have no return value. */
8086
8087 static struct builtin_description bdesc_int_void2arg[] =
8088 {
8089 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, UNKNOWN, 0},
8090 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, UNKNOWN, 0},
8091 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, UNKNOWN, 0}
8092 };
8093
8094 static struct builtin_description bdesc_prefetches[] =
8095 {
8096 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, UNKNOWN,
8097 0},
8098 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, UNKNOWN, 0}
8099 };
8100
8101 /* Media intrinsics that take two arguments, the first being an ACC number. */
8102
8103 static struct builtin_description bdesc_cut[] =
8104 {
8105 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, UNKNOWN, 0},
8106 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, UNKNOWN, 0},
8107 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, UNKNOWN, 0}
8108 };
8109
8110 /* Two-argument media intrinsics with an immediate second argument. */
8111
8112 static struct builtin_description bdesc_2argimm[] =
8113 {
8114 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, UNKNOWN, 0},
8115 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, UNKNOWN, 0},
8116 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, UNKNOWN, 0},
8117 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, UNKNOWN, 0},
8118 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, UNKNOWN, 0},
8119 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, UNKNOWN, 0},
8120 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, UNKNOWN, 0},
8121 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, UNKNOWN, 0},
8122 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, UNKNOWN, 0},
8123 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, UNKNOWN, 0},
8124 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, UNKNOWN, 0},
8125 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, UNKNOWN, 0},
8126 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, UNKNOWN, 0},
8127 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, UNKNOWN, 0},
8128 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, UNKNOWN, 0},
8129 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, UNKNOWN, 0},
8130 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, UNKNOWN, 0}
8131 };
8132
8133 /* Media intrinsics that take two arguments and return void, the first argument
8134 being a pointer to 4 words in memory. */
8135
8136 static struct builtin_description bdesc_void2arg[] =
8137 {
8138 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, UNKNOWN, 0},
8139 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, UNKNOWN, 0},
8140 };
8141
8142 /* Media intrinsics that take three arguments, the first being a const_int that
8143 denotes an accumulator, and that return void. */
8144
8145 static struct builtin_description bdesc_void3arg[] =
8146 {
8147 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, UNKNOWN, 0},
8148 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, UNKNOWN, 0},
8149 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, UNKNOWN, 0},
8150 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, UNKNOWN, 0},
8151 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, UNKNOWN, 0},
8152 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, UNKNOWN, 0},
8153 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, UNKNOWN, 0},
8154 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, UNKNOWN, 0},
8155 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, UNKNOWN, 0},
8156 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, UNKNOWN, 0},
8157 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, UNKNOWN, 0},
8158 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, UNKNOWN, 0},
8159 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, UNKNOWN, 0},
8160 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, UNKNOWN, 0},
8161 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, UNKNOWN, 0},
8162 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, UNKNOWN, 0},
8163 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, UNKNOWN, 0},
8164 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, UNKNOWN, 0},
8165 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, UNKNOWN, 0},
8166 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, UNKNOWN, 0},
8167 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, UNKNOWN, 0},
8168 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, UNKNOWN, 0},
8169 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, UNKNOWN, 0},
8170 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, UNKNOWN, 0},
8171 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, UNKNOWN, 0}
8172 };
8173
8174 /* Media intrinsics that take two accumulator numbers as argument and
8175 return void. */
8176
8177 static struct builtin_description bdesc_voidacc[] =
8178 {
8179 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, UNKNOWN, 0},
8180 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, UNKNOWN, 0},
8181 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, UNKNOWN, 0},
8182 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, UNKNOWN, 0},
8183 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, UNKNOWN, 0},
8184 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, UNKNOWN, 0}
8185 };
8186
8187 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8188 a normal move and the ICODE is for the membar. */
8189
8190 static struct builtin_description bdesc_loads[] =
8191 {
8192 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8193 FRV_BUILTIN_READ8, UNKNOWN, 0},
8194 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8195 FRV_BUILTIN_READ16, UNKNOWN, 0},
8196 { CODE_FOR_optional_membar_si, "__builtin_read32",
8197 FRV_BUILTIN_READ32, UNKNOWN, 0},
8198 { CODE_FOR_optional_membar_di, "__builtin_read64",
8199 FRV_BUILTIN_READ64, UNKNOWN, 0}
8200 };
8201
8202 /* Likewise stores. */
8203
8204 static struct builtin_description bdesc_stores[] =
8205 {
8206 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8207 FRV_BUILTIN_WRITE8, UNKNOWN, 0},
8208 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8209 FRV_BUILTIN_WRITE16, UNKNOWN, 0},
8210 { CODE_FOR_optional_membar_si, "__builtin_write32",
8211 FRV_BUILTIN_WRITE32, UNKNOWN, 0},
8212 { CODE_FOR_optional_membar_di, "__builtin_write64",
8213 FRV_BUILTIN_WRITE64, UNKNOWN, 0},
8214 };
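
/* Added usage sketch (illustrative only; the address and values are made
   up):

     volatile void *port = (volatile void *) 0x8000;
     unsigned long v = __builtin_read32 (port);
     __builtin_write32 (port, v | 1);

   Each call expands to a normal volatile move plus one of the optional
   membars listed above; membars that turn out to be redundant are later
   removed by frv_optimize_membar.  */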
8215
8216 /* Initialize media builtins. */
8217
8218 static void
8219 frv_init_builtins (void)
8220 {
8221 tree accumulator = integer_type_node;
8222 tree integer = integer_type_node;
8223 tree voidt = void_type_node;
8224 tree uhalf = short_unsigned_type_node;
8225 tree sword1 = long_integer_type_node;
8226 tree uword1 = long_unsigned_type_node;
8227 tree sword2 = long_long_integer_type_node;
8228 tree uword2 = long_long_unsigned_type_node;
8229 tree uword4 = build_pointer_type (uword1);
8230 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8231 tree ubyte = unsigned_char_type_node;
8232 tree iacc = integer_type_node;
8233
8234 #define UNARY(RET, T1) \
8235 build_function_type_list (RET, T1, NULL_TREE)
8236
8237 #define BINARY(RET, T1, T2) \
8238 build_function_type_list (RET, T1, T2, NULL_TREE)
8239
8240 #define TRINARY(RET, T1, T2, T3) \
8241 build_function_type_list (RET, T1, T2, T3, NULL_TREE)
8242
8243 #define QUAD(RET, T1, T2, T3, T4) \
8244 build_function_type_list (RET, T1, T2, T3, T4, NULL_TREE)
8245
8246 tree void_ftype_void = build_function_type_list (voidt, NULL_TREE);
8247
8248 tree void_ftype_acc = UNARY (voidt, accumulator);
8249 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8250 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8251 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8252 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8253 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8254 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8255 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8256 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8257
8258 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8259 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8260 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8261 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8262 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8263 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8264 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8265 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8266 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8267 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8268 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8269
8270 tree sw1_ftype_int = UNARY (sword1, integer);
8271 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8272 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8273
8274 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8275 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8276 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8277 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8278 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8279 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8280
8281 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8282 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8283 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8284 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8285 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8286 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8287 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8288 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8289 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8290 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8291 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8292 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8293 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8294 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8295 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8296 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8297 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8298
8299 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8300 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8301 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8302 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8303 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8304 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8305 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8306 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8307 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8308 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8309 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8310 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8311 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8312 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8313 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8314 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8315 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8316 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8317 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8318 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8319 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8320 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8321 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8322 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8323 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8324 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8325 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8326 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8327 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8328 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8329 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8330 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8331 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8332 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8333 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8334 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8335 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8336 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8337 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8338 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8339 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8340 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8341 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8342 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8343 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8344 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8345 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8346 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8347 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8348 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8349 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8350 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8351 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8352 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8353 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8354 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8355 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8356 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8357 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8358 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8359 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8360 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8361 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8362 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8363 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8364 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8365 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8366 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8367 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8368 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8369 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8370 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8371 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8372 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8373 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8374 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8375 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8376 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8377 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8378 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8379 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8380 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8381 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8382 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8383 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8384 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8385 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8386 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8387 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8388 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8389 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8390 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8391 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8392 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8393 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8394 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8395 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8396 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8397 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8398 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8399 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8400 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8401 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8402 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8403 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8404 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8405 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8406 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8407
8408 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8409 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8410 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8411 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8412
8413 #undef UNARY
8414 #undef BINARY
8415 #undef TRINARY
8416 #undef QUAD
8417 }
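
/* Added note: in terms of user-visible C prototypes, the declarations
   above amount to signatures such as (illustrative examples):

     unsigned long __MPACKH (unsigned short, unsigned short);
     void __MMACHS (int acc, long, long);

   where uw1/sw1 map to the long types, uw2/sw2 to the long long types,
   and accumulators are plain ints naming ACC registers.  */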
8418
8419 /* Set the names for various arithmetic operations according to the
8420 FRV ABI. */
8421 static void
8422 frv_init_libfuncs (void)
8423 {
8424 set_optab_libfunc (smod_optab, SImode, "__modi");
8425 set_optab_libfunc (umod_optab, SImode, "__umodi");
8426
8427 set_optab_libfunc (add_optab, DImode, "__addll");
8428 set_optab_libfunc (sub_optab, DImode, "__subll");
8429 set_optab_libfunc (smul_optab, DImode, "__mulll");
8430 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8431 set_optab_libfunc (smod_optab, DImode, "__modll");
8432 set_optab_libfunc (umod_optab, DImode, "__umodll");
8433 set_optab_libfunc (and_optab, DImode, "__andll");
8434 set_optab_libfunc (ior_optab, DImode, "__orll");
8435 set_optab_libfunc (xor_optab, DImode, "__xorll");
8436 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8437
8438 set_optab_libfunc (add_optab, SFmode, "__addf");
8439 set_optab_libfunc (sub_optab, SFmode, "__subf");
8440 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8441 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8442
8443 set_optab_libfunc (add_optab, DFmode, "__addd");
8444 set_optab_libfunc (sub_optab, DFmode, "__subd");
8445 set_optab_libfunc (smul_optab, DFmode, "__muld");
8446 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8447
8448 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8449 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8450
8451 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8452 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8453 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8454 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8455
8456 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8457 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8458 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8459 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8460
8461 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8462 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8463 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8464 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8465 }
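
/* Added example: with these registrations, a 64-bit multiplication that
   has to go out of line, e.g.

     long long f (long long a, long long b) { return a * b; }

   is emitted as a call to __mulll instead of the generic libgcc routine
   (__muldi3).  */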
8466
8467 /* Convert an integer constant to an accumulator register. ICODE is the
8468 code of the target instruction, OPNUM is the number of the
8469 accumulator operand and OPVAL is the constant integer. Try both
8470 ACC and ACCG registers; only report an error if neither fits the
8471 instruction. */
8472
8473 static rtx
8474 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8475 {
8476 rtx reg;
8477 int i;
8478
8479 /* ACCs and ACCGs are implicit global registers if media intrinsics
8480 are being used. We set this up lazily to avoid creating lots of
8481 unnecessary call_insn rtl in non-media code. */
8482 for (i = 0; i <= ACC_MASK; i++)
8483 if ((i & ACC_MASK) == i)
8484 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8485
8486 if (GET_CODE (opval) != CONST_INT)
8487 {
8488 error ("accumulator is not a constant integer");
8489 return NULL_RTX;
8490 }
8491 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8492 {
8493 error ("accumulator number is out of bounds");
8494 return NULL_RTX;
8495 }
8496
8497 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8498 ACC_FIRST + INTVAL (opval));
8499 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8500 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8501
8502 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8503 {
8504 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8505 return NULL_RTX;
8506 }
8507 return reg;
8508 }
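
/* Added example: a constant argument of 2 first yields
   (reg:MODE ACC_FIRST + 2); if the instruction's operand predicate rejects
   that (e.g. because it wants an accumulator guard), the register number
   is switched to ACCG_FIRST + 2 before any error is reported.  */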
8509
8510 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8511 should have. */
8512
8513 static machine_mode
8514 frv_matching_accg_mode (machine_mode mode)
8515 {
8516 switch (mode)
8517 {
8518 case E_V4SImode:
8519 return V4QImode;
8520
8521 case E_DImode:
8522 return HImode;
8523
8524 case E_SImode:
8525 return QImode;
8526
8527 default:
8528 gcc_unreachable ();
8529 }
8530 }
8531
8532 /* Given that a __builtin_read or __builtin_write function is accessing
8533 address ADDRESS, return the value that should be used as operand 1
8534 of the membar. */
8535
8536 static rtx
8537 frv_io_address_cookie (rtx address)
8538 {
8539 return (GET_CODE (address) == CONST_INT
8540 ? GEN_INT (INTVAL (address) / 8 * 8)
8541 : const0_rtx);
8542 }
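
/* Added example: a constant address of 0x1007 produces the cookie 0x1000,
   i.e. the address rounded down to the start of its doubleword, while any
   non-constant address gets a zero cookie.  */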
8543
8544 /* Return the accumulator guard that should be paired with accumulator
8545 register ACC. The mode of the returned register is in the same
8546 class as ACC, but is four times smaller. */
8547
8548 rtx
8549 frv_matching_accg_for_acc (rtx acc)
8550 {
8551 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8552 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8553 }
8554
8555 /* Read the argument at position INDEX from the call expression EXP.
8556 Return the value as an rtx. */
8557
8558 static rtx
8559 frv_read_argument (tree exp, unsigned int index)
8560 {
8561 return expand_normal (CALL_EXPR_ARG (exp, index));
8562 }
8563
8564 /* Like frv_read_argument, but interpret the argument as the number
8565 of an IACC register and return a (reg:MODE ...) rtx for it. */
8566
8567 static rtx
8568 frv_read_iacc_argument (machine_mode mode, tree call,
8569 unsigned int index)
8570 {
8571 int i, regno;
8572 rtx op;
8573
8574 op = frv_read_argument (call, index);
8575 if (GET_CODE (op) != CONST_INT
8576 || INTVAL (op) < 0
8577 || INTVAL (op) > IACC_LAST - IACC_FIRST
8578 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8579 {
8580 error ("invalid IACC argument");
8581 op = const0_rtx;
8582 }
8583
8584 /* IACCs are implicit global registers. We set this up lazily to
8585 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8586 being used. */
8587 regno = INTVAL (op) + IACC_FIRST;
8588 for (i = 0; i < hard_regno_nregs (regno, mode); i++)
8589 global_regs[regno + i] = 1;
8590
8591 return gen_rtx_REG (mode, regno);
8592 }
8593
8594 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8595 The instruction should require a constant operand of some sort. The
8596 function prints an error if OPVAL is not valid. */
8597
8598 static int
8599 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8600 {
8601 if (GET_CODE (opval) != CONST_INT)
8602 {
8603 error ("%qs expects a constant argument", insn_data[icode].name);
8604 return FALSE;
8605 }
8606 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8607 {
8608 error ("constant argument out of range for %qs", insn_data[icode].name);
8609 return FALSE;
8610 }
8611 return TRUE;
8612 }
8613
8614 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8615 if it's not null, has the right mode, and satisfies operand 0's
8616 predicate. */
8617
8618 static rtx
8619 frv_legitimize_target (enum insn_code icode, rtx target)
8620 {
8621 machine_mode mode = insn_data[icode].operand[0].mode;
8622
8623 if (! target
8624 || GET_MODE (target) != mode
8625 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8626 return gen_reg_rtx (mode);
8627 else
8628 return target;
8629 }
8630
8631 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8632 check whether ARG satisfies the operand's constraints. If it doesn't,
8633 copy ARG to a temporary register and return that. Otherwise return ARG
8634 itself. */
8635
8636 static rtx
8637 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8638 {
8639 machine_mode mode = insn_data[icode].operand[opnum].mode;
8640
8641 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8642 return arg;
8643 else
8644 return copy_to_mode_reg (mode, arg);
8645 }
8646
8647 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8648
8649 static rtx
8650 frv_volatile_memref (machine_mode mode, rtx arg)
8651 {
8652 rtx mem;
8653
8654 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8655 MEM_VOLATILE_P (mem) = 1;
8656 return mem;
8657 }
8658
8659 /* Expand builtins that take a single, constant argument. At the moment,
8660 only MHDSETS falls into this category. */
8661
8662 static rtx
8663 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8664 {
8665 rtx pat;
8666 rtx op0 = frv_read_argument (call, 0);
8667
8668 if (! frv_check_constant_argument (icode, 1, op0))
8669 return NULL_RTX;
8670
8671 target = frv_legitimize_target (icode, target);
8672 pat = GEN_FCN (icode) (target, op0);
8673 if (! pat)
8674 return NULL_RTX;
8675
8676 emit_insn (pat);
8677 return target;
8678 }
8679
8680 /* Expand builtins that take one operand. */
8681
8682 static rtx
8683 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8684 {
8685 rtx pat;
8686 rtx op0 = frv_read_argument (call, 0);
8687
8688 target = frv_legitimize_target (icode, target);
8689 op0 = frv_legitimize_argument (icode, 1, op0);
8690 pat = GEN_FCN (icode) (target, op0);
8691 if (! pat)
8692 return NULL_RTX;
8693
8694 emit_insn (pat);
8695 return target;
8696 }
8697
8698 /* Expand builtins that take two operands. */
8699
8700 static rtx
8701 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8702 {
8703 rtx pat;
8704 rtx op0 = frv_read_argument (call, 0);
8705 rtx op1 = frv_read_argument (call, 1);
8706
8707 target = frv_legitimize_target (icode, target);
8708 op0 = frv_legitimize_argument (icode, 1, op0);
8709 op1 = frv_legitimize_argument (icode, 2, op1);
8710 pat = GEN_FCN (icode) (target, op0, op1);
8711 if (! pat)
8712 return NULL_RTX;
8713
8714 emit_insn (pat);
8715 return target;
8716 }
8717
8718 /* Expand cut-style builtins, which take two explicit operands plus an
8719 implicit ACCG operand. */
8720
8721 static rtx
8722 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8723 {
8724 rtx pat;
8725 rtx op0 = frv_read_argument (call, 0);
8726 rtx op1 = frv_read_argument (call, 1);
8727 rtx op2;
8728
8729 target = frv_legitimize_target (icode, target);
8730 op0 = frv_int_to_acc (icode, 1, op0);
8731 if (! op0)
8732 return NULL_RTX;
8733
8734 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8735 {
8736 if (! frv_check_constant_argument (icode, 2, op1))
8737 return NULL_RTX;
8738 }
8739 else
8740 op1 = frv_legitimize_argument (icode, 2, op1);
8741
8742 op2 = frv_matching_accg_for_acc (op0);
8743 pat = GEN_FCN (icode) (target, op0, op1, op2);
8744 if (! pat)
8745 return NULL_RTX;
8746
8747 emit_insn (pat);
8748 return target;
8749 }
8750
8751 /* Expand builtins that take two operands and the second is immediate. */
8752
8753 static rtx
8754 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8755 {
8756 rtx pat;
8757 rtx op0 = frv_read_argument (call, 0);
8758 rtx op1 = frv_read_argument (call, 1);
8759
8760 if (! frv_check_constant_argument (icode, 2, op1))
8761 return NULL_RTX;
8762
8763 target = frv_legitimize_target (icode, target);
8764 op0 = frv_legitimize_argument (icode, 1, op0);
8765 pat = GEN_FCN (icode) (target, op0, op1);
8766 if (! pat)
8767 return NULL_RTX;
8768
8769 emit_insn (pat);
8770 return target;
8771 }
8772
8773 /* Expand builtins that take two operands and return void, the first
8774 operand being a pointer to ints. */
8775
8776 static rtx
8777 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8778 {
8779 rtx pat;
8780 rtx op0 = frv_read_argument (call, 0);
8781 rtx op1 = frv_read_argument (call, 1);
8782 machine_mode mode0 = insn_data[icode].operand[0].mode;
8783 rtx addr;
8784
8785 if (GET_CODE (op0) != MEM)
8786 {
8787 rtx reg = op0;
8788
8789 if (! offsettable_address_p (0, mode0, op0))
8790 {
8791 reg = gen_reg_rtx (Pmode);
8792 emit_insn (gen_rtx_SET (reg, op0));
8793 }
8794
8795 op0 = gen_rtx_MEM (SImode, reg);
8796 }
8797
8798 addr = XEXP (op0, 0);
8799 if (! offsettable_address_p (0, mode0, addr))
8800 addr = copy_to_mode_reg (Pmode, op0);
8801
8802 op0 = change_address (op0, V4SImode, addr);
8803 op1 = frv_legitimize_argument (icode, 1, op1);
8804 pat = GEN_FCN (icode) (op0, op1);
8805 if (! pat)
8806 return 0;
8807
8808 emit_insn (pat);
8809 return 0;
8810 }

/* Expand builtins that take two long operands and return void.  */

static rtx
frv_expand_int_void2arg (enum insn_code icode, tree call)
{
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);
  rtx op1 = frv_read_argument (call, 1);

  op0 = frv_legitimize_argument (icode, 1, op0);
  op1 = frv_legitimize_argument (icode, 1, op1);
  pat = GEN_FCN (icode) (op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand prefetch builtins.  These take a single address as argument.  */

static rtx
frv_expand_prefetches (enum insn_code icode, tree call)
{
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);

  pat = GEN_FCN (icode) (force_reg (Pmode, op0));
  if (! pat)
    return 0;

  emit_insn (pat);
  return 0;
}

/* Expand builtins that take three operands and return void.  The first
   argument must be a constant that describes a pair or quad of
   accumulators.  A fourth argument is created that is the accumulator
   guard register corresponding to the accumulator.  */

static rtx
frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
{
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);
  rtx op1 = frv_read_argument (call, 1);
  rtx op2 = frv_read_argument (call, 2);
  rtx op3;

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_legitimize_argument (icode, 1, op1);
  op2 = frv_legitimize_argument (icode, 2, op2);
  op3 = frv_matching_accg_for_acc (op0);
  pat = GEN_FCN (icode) (op0, op1, op2, op3);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand builtins that perform accumulator-to-accumulator operations.
   These builtins take two accumulator numbers as argument and return
   void.  */

static rtx
frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
{
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);
  rtx op1 = frv_read_argument (call, 1);
  rtx op2;
  rtx op3;

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_int_to_acc (icode, 1, op1);
  if (! op1)
    return NULL_RTX;

  op2 = frv_matching_accg_for_acc (op0);
  op3 = frv_matching_accg_for_acc (op1);
  pat = GEN_FCN (icode) (op0, op1, op2, op3);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return NULL_RTX;
}

/* Expand a __builtin_read* function.  ICODE is the instruction code for the
   membar and TARGET_MODE is the mode that the loaded value should have.  */

static rtx
frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode,
                         tree call, rtx target)
{
  rtx op0 = frv_read_argument (call, 0);
  rtx cookie = frv_io_address_cookie (op0);

  if (target == 0 || !REG_P (target))
    target = gen_reg_rtx (target_mode);
  op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
  convert_move (target, op0, 1);
  emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
  cfun->machine->has_membar_p = 1;
  return target;
}

/* Likewise __builtin_write* functions.  */

static rtx
frv_expand_store_builtin (enum insn_code icode, tree call)
{
  rtx op0 = frv_read_argument (call, 0);
  rtx op1 = frv_read_argument (call, 1);
  rtx cookie = frv_io_address_cookie (op0);

  op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
  convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
  emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
  cfun->machine->has_membar_p = 1;
  return NULL_RTX;
}
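
/* Both I/O expanders above therefore emit two things: an ordinary move to
   or from a volatile memory reference for the accessed location, followed
   by the membar pattern named by ICODE, which carries the address cookie
   and a GEN_INT (FRV_IO_READ) or GEN_INT (FRV_IO_WRITE) marker.  They also
   set cfun->machine->has_membar_p so that the rest of the backend knows
   the function contains membars.  */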

/* Expand the MDPACKH builtin.  It takes four unsigned short arguments;
   each argument supplies the low halfword of one word of the two
   double-word input registers.  CALL is the tree for the call and TARGET,
   if nonnull, suggests a good place to put the return value.  */

static rtx
frv_expand_mdpackh_builtin (tree call, rtx target)
{
  enum insn_code icode = CODE_FOR_mdpackh;
  rtx pat, op0, op1;
  rtx arg1 = frv_read_argument (call, 0);
  rtx arg2 = frv_read_argument (call, 1);
  rtx arg3 = frv_read_argument (call, 2);
  rtx arg4 = frv_read_argument (call, 3);

  target = frv_legitimize_target (icode, target);
  op0 = gen_reg_rtx (DImode);
  op1 = gen_reg_rtx (DImode);

  /* The high half of each word is not explicitly initialized, so indicate
     that the input operands are not live before this point.  */
  emit_clobber (op0);
  emit_clobber (op1);

  /* Move each argument into the low half of its associated input word.  */
  emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
  emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
  emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
  emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
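
/* To illustrate the subreg offsets used above: FRV is big-endian, so byte
   offsets 2 and 6 within a DImode value select the low halfwords of its
   two SImode words.  After the four moves, the inputs are laid out as

     op0 = { <undef>:ARG1 | <undef>:ARG2 }
     op1 = { <undef>:ARG3 | <undef>:ARG4 }

   where each "<undef>:ARGn" pair is one 32-bit word whose high halfword
   was left uninitialized by the clobbers.  */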

/* Expand the MCLRACC builtin.  This builtin takes a single accumulator
   number as argument.  */

static rtx
frv_expand_mclracc_builtin (tree call)
{
  enum insn_code icode = CODE_FOR_mclracc;
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  pat = GEN_FCN (icode) (op0);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Expand builtins that take no arguments.  */

static rtx
frv_expand_noargs_builtin (enum insn_code icode)
{
  rtx pat = GEN_FCN (icode) (const0_rtx);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Expand MRDACC and MRDACCG.  These builtins take a single accumulator
   number or accumulator guard number as argument and return an SI
   integer.  */

static rtx
frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
{
  rtx pat;
  rtx target = gen_reg_rtx (SImode);
  rtx op0 = frv_read_argument (call, 0);

  op0 = frv_int_to_acc (icode, 1, op0);
  if (! op0)
    return NULL_RTX;

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Expand MWTACC and MWTACCG.  These builtins take an accumulator or
   accumulator guard as their first argument and an SImode value as their
   second.  */

static rtx
frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
{
  rtx pat;
  rtx op0 = frv_read_argument (call, 0);
  rtx op1 = frv_read_argument (call, 1);

  op0 = frv_int_to_acc (icode, 0, op0);
  if (! op0)
    return NULL_RTX;

  op1 = frv_legitimize_argument (icode, 1, op1);
  pat = GEN_FCN (icode) (op0, op1);
  if (pat)
    emit_insn (pat);

  return NULL_RTX;
}

/* Emit a move from SRC to DEST in SImode chunks.  This can be used
   to move DImode values into and out of IACC0.  */

static void
frv_split_iacc_move (rtx dest, rtx src)
{
  machine_mode inner;
  int i;

  inner = GET_MODE (dest);
  for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
    emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
                    simplify_gen_subreg (SImode, src, inner, i));
}
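
/* For example, when DEST and SRC are DImode, the loop above emits two
   SImode moves, one for the subwords at byte offset 0 and one for those at
   byte offset 4.  The IACCreadll and IACCsetll cases in frv_expand_builtin
   below rely on this to transfer 64-bit values to and from IACC0.  */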

/* Expand builtins.  */

static rtx
frv_expand_builtin (tree exp,
                    rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned fcode = DECL_MD_FUNCTION_CODE (fndecl);
  unsigned i;
  struct builtin_description *d;

  if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
    {
      error ("media functions are not available unless %<-mmedia%> is used");
      return NULL_RTX;
    }

  switch (fcode)
    {
    case FRV_BUILTIN_MCOP1:
    case FRV_BUILTIN_MCOP2:
    case FRV_BUILTIN_MDUNPACKH:
    case FRV_BUILTIN_MBTOHE:
      if (! TARGET_MEDIA_REV1)
        {
          error ("this media function is only available on the fr500");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_MQXMACHS:
    case FRV_BUILTIN_MQXMACXHS:
    case FRV_BUILTIN_MQMACXHS:
    case FRV_BUILTIN_MADDACCS:
    case FRV_BUILTIN_MSUBACCS:
    case FRV_BUILTIN_MASACCS:
    case FRV_BUILTIN_MDADDACCS:
    case FRV_BUILTIN_MDSUBACCS:
    case FRV_BUILTIN_MDASACCS:
    case FRV_BUILTIN_MABSHS:
    case FRV_BUILTIN_MDROTLI:
    case FRV_BUILTIN_MCPLHI:
    case FRV_BUILTIN_MCPLI:
    case FRV_BUILTIN_MDCUTSSI:
    case FRV_BUILTIN_MQSATHS:
    case FRV_BUILTIN_MHSETLOS:
    case FRV_BUILTIN_MHSETLOH:
    case FRV_BUILTIN_MHSETHIS:
    case FRV_BUILTIN_MHSETHIH:
    case FRV_BUILTIN_MHDSETS:
    case FRV_BUILTIN_MHDSETH:
      if (! TARGET_MEDIA_REV2)
        {
          error ("this media function is only available on the fr400"
                 " and fr550");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_SMASS:
    case FRV_BUILTIN_SMSSS:
    case FRV_BUILTIN_SMU:
    case FRV_BUILTIN_ADDSS:
    case FRV_BUILTIN_SUBSS:
    case FRV_BUILTIN_SLASS:
    case FRV_BUILTIN_SCUTSS:
    case FRV_BUILTIN_IACCreadll:
    case FRV_BUILTIN_IACCreadl:
    case FRV_BUILTIN_IACCsetll:
    case FRV_BUILTIN_IACCsetl:
      if (!TARGET_FR405_BUILTINS)
        {
          error ("this builtin function is only available"
                 " on the fr405 and fr450");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_PREFETCH:
      if (!TARGET_FR500_FR550_BUILTINS)
        {
          error ("this builtin function is only available on the fr500"
                 " and fr550");
          return NULL_RTX;
        }
      break;

    case FRV_BUILTIN_MQLCLRHS:
    case FRV_BUILTIN_MQLMTHS:
    case FRV_BUILTIN_MQSLLHI:
    case FRV_BUILTIN_MQSRAHI:
      if (!TARGET_MEDIA_FR450)
        {
          error ("this builtin function is only available on the fr450");
          return NULL_RTX;
        }
      break;

    default:
      break;
    }

  /* Expand unique builtins.  */

  switch (fcode)
    {
    case FRV_BUILTIN_MTRAP:
      return frv_expand_noargs_builtin (CODE_FOR_mtrap);

    case FRV_BUILTIN_MCLRACC:
      return frv_expand_mclracc_builtin (exp);

    case FRV_BUILTIN_MCLRACCA:
      if (TARGET_ACC_8)
        return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
      else
        return frv_expand_noargs_builtin (CODE_FOR_mclracca4);

    case FRV_BUILTIN_MRDACC:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);

    case FRV_BUILTIN_MRDACCG:
      return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);

    case FRV_BUILTIN_MWTACC:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);

    case FRV_BUILTIN_MWTACCG:
      return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);

    case FRV_BUILTIN_MDPACKH:
      return frv_expand_mdpackh_builtin (exp, target);

    case FRV_BUILTIN_IACCreadll:
      {
        rtx src = frv_read_iacc_argument (DImode, exp, 0);
        if (target == 0 || !REG_P (target))
          target = gen_reg_rtx (DImode);
        frv_split_iacc_move (target, src);
        return target;
      }

    case FRV_BUILTIN_IACCreadl:
      return frv_read_iacc_argument (SImode, exp, 0);

    case FRV_BUILTIN_IACCsetll:
      {
        rtx dest = frv_read_iacc_argument (DImode, exp, 0);
        rtx src = frv_read_argument (exp, 1);
        frv_split_iacc_move (dest, force_reg (DImode, src));
        return 0;
      }

    case FRV_BUILTIN_IACCsetl:
      {
        rtx dest = frv_read_iacc_argument (SImode, exp, 0);
        rtx src = frv_read_argument (exp, 1);
        emit_move_insn (dest, force_reg (SImode, src));
        return 0;
      }

    default:
      break;
    }

  /* Expand groups of builtins.  */

  for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
    if (d->code == fcode)
      return frv_expand_set_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_unop_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_binop_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
    if (d->code == fcode)
      return frv_expand_cut_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
    if (d->code == fcode)
      return frv_expand_binopimm_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidbinop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidtriop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
    if (d->code == fcode)
      return frv_expand_voidaccop_builtin (d->icode, exp);

  for (i = 0, d = bdesc_int_void2arg;
       i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
    if (d->code == fcode)
      return frv_expand_int_void2arg (d->icode, exp);

  for (i = 0, d = bdesc_prefetches;
       i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
    if (d->code == fcode)
      return frv_expand_prefetches (d->icode, exp);

  for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
    if (d->code == fcode)
      return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
                                      exp, target);

  for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
    if (d->code == fcode)
      return frv_expand_store_builtin (d->icode, exp);

  return 0;
}
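
/* To trace a call through the dispatcher above: a source-level use of one
   of the FR-V intrinsics (spelled __MCLRACC, __MRDACC and so on; see the
   FR-V built-in functions section of the GCC manual for the exact
   prototypes) arrives here with its FRV_BUILTIN_* code, is first checked
   against the capabilities selected by -mcpu/-mmedia, and is then either
   handled by one of the dedicated expanders in the second switch or looked
   up in the bdesc_* tables and routed to the generic expanders.  */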

static bool
frv_in_small_data_p (const_tree decl)
{
  HOST_WIDE_INT size;
  const char *section_name;

  /* Don't apply the -G flag to internal compiler structures.  We
     should leave such structures in the main data section, partly
     for efficiency and partly because the size of some of them
     (such as C++ typeinfos) is not known until later.  */
  if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
    return false;

  /* If we already know which section the decl should be in, see if
     it's a small data section.  */
  section_name = DECL_SECTION_NAME (decl);
  if (section_name)
    {
      if (frv_string_begins_with (section_name, ".sdata"))
        return true;
      if (frv_string_begins_with (section_name, ".sbss"))
        return true;
      return false;
    }

  size = int_size_in_bytes (TREE_TYPE (decl));
  if (size > 0 && size <= g_switch_value)
    return true;

  return false;
}
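
/* For example, when -G 8 is in effect (g_switch_value == 8), a file-scope
   "int x[2];" with no section attribute is put in small data because its
   size is 8 bytes, while a variable explicitly placed in a section whose
   name starts with ".sdata" or ".sbss" is treated as small data regardless
   of its size.  */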

static bool
frv_rtx_costs (rtx x,
               machine_mode mode,
               int outer_code,
               int opno ATTRIBUTE_UNUSED,
               int *total,
               bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  if (outer_code == MEM)
    {
      /* Don't differentiate between memory addresses.  All the ones
         we accept have equal cost.  */
      *total = COSTS_N_INSNS (0);
      return true;
    }

  switch (code)
    {
    case CONST_INT:
      /* Make 12-bit integers really cheap.  */
      if (IN_RANGE (INTVAL (x), -2048, 2047))
        {
          *total = 0;
          return true;
        }
      /* Fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case NOT:
    case NEG:
    case COMPARE:
      if (mode == SImode)
        *total = COSTS_N_INSNS (1);
      else if (mode == DImode)
        *total = COSTS_N_INSNS (2);
      else
        *total = COSTS_N_INSNS (3);
      return true;

    case MULT:
      if (mode == SImode)
        *total = COSTS_N_INSNS (2);
      else
        *total = COSTS_N_INSNS (6);	/* guess */
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (18);
      return true;

    case MEM:
      *total = COSTS_N_INSNS (3);
      return true;

    default:
      return false;
    }
}
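
/* In other words, the cost model is deliberately coarse: constants that
   fit in a signed 12-bit immediate are free and other constants cost two
   instructions; SImode arithmetic costs one instruction, DImode two, and
   other modes three; multiplication costs two (SImode) or an estimated
   six (wider modes); division and modulus cost eighteen; and a memory
   reference costs three.  */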

static void
frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (ctors_section);
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

static void
frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (dtors_section);
  assemble_align (POINTER_SIZE);
  if (TARGET_FDPIC)
    {
      int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);

      gcc_assert (ok);
      return;
    }
  assemble_integer_with_op ("\t.picptr\t", symbol);
}

/* Worker function for TARGET_STRUCT_VALUE_RTX.  */

static rtx
frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
}

#define TLS_BIAS (2048 - 16)

/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void
frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  gcc_assert (size == 4);
  fputs ("\t.picptr\ttlsmoff(", file);
  /* We want the unbiased TLS offset, so add the bias to the
     expression, such that the implicit biasing cancels out.  */
  output_addr_const (file, plus_constant (Pmode, x, TLS_BIAS));
  fputs (")", file);
}
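
/* Concretely, since TLS_BIAS is 2048 - 16 == 2032, a DTP-relative
   reference to a symbol X is printed as

        .picptr tlsmoff(X+2032)

   so that the implicit bias applied when tlsmoff() is resolved cancels
   against the constant added here.  (The exact symbol expression printed
   by output_addr_const depends on X; "X+2032" is just the simple case of
   a bare SYMBOL_REF.)  */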

#include "gt-frv.h"