xref: /openbsd/gnu/gcc/gcc/config/arc/arc.c (revision 404b540a)
1*404b540aSrobert /* Subroutines used for code generation on the Argonaut ARC cpu.
2*404b540aSrobert    Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3*404b540aSrobert    2005
4*404b540aSrobert    Free Software Foundation, Inc.
5*404b540aSrobert 
6*404b540aSrobert This file is part of GCC.
7*404b540aSrobert 
8*404b540aSrobert GCC is free software; you can redistribute it and/or modify
9*404b540aSrobert it under the terms of the GNU General Public License as published by
10*404b540aSrobert the Free Software Foundation; either version 2, or (at your option)
11*404b540aSrobert any later version.
12*404b540aSrobert 
13*404b540aSrobert GCC is distributed in the hope that it will be useful,
14*404b540aSrobert but WITHOUT ANY WARRANTY; without even the implied warranty of
15*404b540aSrobert MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16*404b540aSrobert GNU General Public License for more details.
17*404b540aSrobert 
18*404b540aSrobert You should have received a copy of the GNU General Public License
19*404b540aSrobert along with GCC; see the file COPYING.  If not, write to
20*404b540aSrobert the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21*404b540aSrobert Boston, MA 02110-1301, USA.  */
22*404b540aSrobert 
23*404b540aSrobert /* ??? This is an old port, and is undoubtedly suffering from bit rot.  */
24*404b540aSrobert 
25*404b540aSrobert #include "config.h"
26*404b540aSrobert #include "system.h"
27*404b540aSrobert #include "coretypes.h"
28*404b540aSrobert #include "tm.h"
29*404b540aSrobert #include "tree.h"
30*404b540aSrobert #include "rtl.h"
31*404b540aSrobert #include "regs.h"
32*404b540aSrobert #include "hard-reg-set.h"
33*404b540aSrobert #include "real.h"
34*404b540aSrobert #include "insn-config.h"
35*404b540aSrobert #include "conditions.h"
36*404b540aSrobert #include "output.h"
37*404b540aSrobert #include "insn-attr.h"
38*404b540aSrobert #include "flags.h"
39*404b540aSrobert #include "function.h"
40*404b540aSrobert #include "expr.h"
41*404b540aSrobert #include "recog.h"
42*404b540aSrobert #include "toplev.h"
43*404b540aSrobert #include "tm_p.h"
44*404b540aSrobert #include "target.h"
45*404b540aSrobert #include "target-def.h"
46*404b540aSrobert 
47*404b540aSrobert /* Which cpu we're compiling for.  */
48*404b540aSrobert int arc_cpu_type;
49*404b540aSrobert 
50*404b540aSrobert /* Name of mangle string to add to symbols to separate code compiled for each
51*404b540aSrobert    cpu (or NULL).  */
52*404b540aSrobert const char *arc_mangle_cpu;
53*404b540aSrobert 
54*404b540aSrobert /* Save the operands last given to a compare for use when we
55*404b540aSrobert    generate a scc or bcc insn.  */
56*404b540aSrobert rtx arc_compare_op0, arc_compare_op1;
57*404b540aSrobert 
58*404b540aSrobert /* Name of text, data, and rodata sections used in varasm.c.  */
59*404b540aSrobert const char *arc_text_section;
60*404b540aSrobert const char *arc_data_section;
61*404b540aSrobert const char *arc_rodata_section;
62*404b540aSrobert 
63*404b540aSrobert /* Array of valid operand punctuation characters.  */
64*404b540aSrobert char arc_punct_chars[256];
65*404b540aSrobert 
66*404b540aSrobert /* Variables used by arc_final_prescan_insn to implement conditional
67*404b540aSrobert    execution.  */
68*404b540aSrobert static int arc_ccfsm_state;
69*404b540aSrobert static int arc_ccfsm_current_cc;
70*404b540aSrobert static rtx arc_ccfsm_target_insn;
71*404b540aSrobert static int arc_ccfsm_target_label;
72*404b540aSrobert 
73*404b540aSrobert /* The maximum number of insns skipped which will be conditionalised if
74*404b540aSrobert    possible.  */
75*404b540aSrobert #define MAX_INSNS_SKIPPED 3
76*404b540aSrobert 
77*404b540aSrobert /* A nop is needed between a 4 byte insn that sets the condition codes and
78*404b540aSrobert    a branch that uses them (the same isn't true for an 8 byte insn that sets
79*404b540aSrobert    the condition codes).  Set by arc_final_prescan_insn.  Used by
80*404b540aSrobert    arc_print_operand.  */
81*404b540aSrobert static int last_insn_set_cc_p;
82*404b540aSrobert static int current_insn_set_cc_p;
83*404b540aSrobert static bool arc_handle_option (size_t, const char *, int);
84*404b540aSrobert static void record_cc_ref (rtx);
85*404b540aSrobert static void arc_init_reg_tables (void);
86*404b540aSrobert static int get_arc_condition_code (rtx);
87*404b540aSrobert const struct attribute_spec arc_attribute_table[];
88*404b540aSrobert static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
89*404b540aSrobert static bool arc_assemble_integer (rtx, unsigned int, int);
90*404b540aSrobert static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
91*404b540aSrobert static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
92*404b540aSrobert static void arc_file_start (void);
93*404b540aSrobert static void arc_internal_label (FILE *, const char *, unsigned long);
94*404b540aSrobert static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
95*404b540aSrobert 					tree, int *, int);
96*404b540aSrobert static bool arc_rtx_costs (rtx, int, int, int *);
97*404b540aSrobert static int arc_address_cost (rtx);
98*404b540aSrobert static void arc_external_libcall (rtx);
99*404b540aSrobert static bool arc_return_in_memory (tree, tree);
100*404b540aSrobert static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
101*404b540aSrobert 				   tree, bool);
102*404b540aSrobert 
103*404b540aSrobert /* Initialize the GCC target structure.  */
104*404b540aSrobert #undef TARGET_ASM_ALIGNED_HI_OP
105*404b540aSrobert #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
106*404b540aSrobert #undef TARGET_ASM_ALIGNED_SI_OP
107*404b540aSrobert #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
108*404b540aSrobert #undef TARGET_ASM_INTEGER
109*404b540aSrobert #define TARGET_ASM_INTEGER arc_assemble_integer
110*404b540aSrobert 
111*404b540aSrobert #undef TARGET_ASM_FUNCTION_PROLOGUE
112*404b540aSrobert #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
113*404b540aSrobert #undef TARGET_ASM_FUNCTION_EPILOGUE
114*404b540aSrobert #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
115*404b540aSrobert #undef TARGET_ASM_FILE_START
116*404b540aSrobert #define TARGET_ASM_FILE_START arc_file_start
117*404b540aSrobert #undef TARGET_ATTRIBUTE_TABLE
118*404b540aSrobert #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
119*404b540aSrobert #undef TARGET_ASM_INTERNAL_LABEL
120*404b540aSrobert #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
121*404b540aSrobert #undef TARGET_ASM_EXTERNAL_LIBCALL
122*404b540aSrobert #define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall
123*404b540aSrobert 
124*404b540aSrobert #undef TARGET_HANDLE_OPTION
125*404b540aSrobert #define TARGET_HANDLE_OPTION arc_handle_option
126*404b540aSrobert 
127*404b540aSrobert #undef TARGET_RTX_COSTS
128*404b540aSrobert #define TARGET_RTX_COSTS arc_rtx_costs
129*404b540aSrobert #undef TARGET_ADDRESS_COST
130*404b540aSrobert #define TARGET_ADDRESS_COST arc_address_cost
131*404b540aSrobert 
132*404b540aSrobert #undef TARGET_PROMOTE_FUNCTION_ARGS
133*404b540aSrobert #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
134*404b540aSrobert #undef TARGET_PROMOTE_FUNCTION_RETURN
135*404b540aSrobert #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
136*404b540aSrobert #undef TARGET_PROMOTE_PROTOTYPES
137*404b540aSrobert #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
138*404b540aSrobert 
139*404b540aSrobert #undef TARGET_RETURN_IN_MEMORY
140*404b540aSrobert #define TARGET_RETURN_IN_MEMORY arc_return_in_memory
141*404b540aSrobert #undef TARGET_PASS_BY_REFERENCE
142*404b540aSrobert #define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
143*404b540aSrobert #undef TARGET_CALLEE_COPIES
144*404b540aSrobert #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
145*404b540aSrobert 
146*404b540aSrobert #undef TARGET_SETUP_INCOMING_VARARGS
147*404b540aSrobert #define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs
148*404b540aSrobert 
149*404b540aSrobert struct gcc_target targetm = TARGET_INITIALIZER;
150*404b540aSrobert 
151*404b540aSrobert /* Implement TARGET_HANDLE_OPTION.  */
152*404b540aSrobert 
153*404b540aSrobert static bool
154*404b540aSrobert arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
155*404b540aSrobert {
156*404b540aSrobert   switch (code)
157*404b540aSrobert     {
158*404b540aSrobert     case OPT_mcpu_:
159*404b540aSrobert       return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
160*404b540aSrobert 
161*404b540aSrobert     default:
162*404b540aSrobert       return true;
163*404b540aSrobert     }
164*404b540aSrobert }
165*404b540aSrobert 
166*404b540aSrobert /* Called by OVERRIDE_OPTIONS to initialize various things.  */
167*404b540aSrobert 
168*404b540aSrobert void
169*404b540aSrobert arc_init (void)
170*404b540aSrobert {
171*404b540aSrobert   char *tmp;
172*404b540aSrobert 
173*404b540aSrobert   /* Set the pseudo-ops for the various standard sections.  */
174*404b540aSrobert   arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
175*404b540aSrobert   sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
176*404b540aSrobert   arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
177*404b540aSrobert   sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
178*404b540aSrobert   arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
179*404b540aSrobert   sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
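  /* Each pointer now holds the complete section directive: with a format
     along the lines of "\t.section %s" (the exact string is supplied by
     ARC_SECTION_FORMAT in arc.h) and a text section named ".text", the
     resulting string would be "\t.section .text".  */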
180*404b540aSrobert 
181*404b540aSrobert   arc_init_reg_tables ();
182*404b540aSrobert 
183*404b540aSrobert   /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
184*404b540aSrobert   memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
185*404b540aSrobert   arc_punct_chars['#'] = 1;
186*404b540aSrobert   arc_punct_chars['*'] = 1;
187*404b540aSrobert   arc_punct_chars['?'] = 1;
188*404b540aSrobert   arc_punct_chars['!'] = 1;
189*404b540aSrobert   arc_punct_chars['~'] = 1;
190*404b540aSrobert }
191*404b540aSrobert 
192*404b540aSrobert /* The condition codes of the ARC, and the inverse function.  */
193*404b540aSrobert static const char *const arc_condition_codes[] =
194*404b540aSrobert {
195*404b540aSrobert   "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
196*404b540aSrobert   "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
197*404b540aSrobert };
198*404b540aSrobert 
199*404b540aSrobert #define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
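/* The table above is laid out in true/false pairs so that inverting a
   condition is just an XOR with 1: "eq" (index 2) <-> "ne" (3),
   "c" (6) <-> "nc" (7), "gt" (10) <-> "le" (11), "ge" (12) <-> "lt" (13),
   "hi" (14) <-> "ls" (15).  */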
200*404b540aSrobert 
201*404b540aSrobert /* Returns the index of the ARC condition code string in
202*404b540aSrobert    `arc_condition_codes'.  COMPARISON should be an rtx like
203*404b540aSrobert    `(eq (...) (...))'.  */
204*404b540aSrobert 
205*404b540aSrobert static int
206*404b540aSrobert get_arc_condition_code (rtx comparison)
207*404b540aSrobert {
208*404b540aSrobert   switch (GET_CODE (comparison))
209*404b540aSrobert     {
210*404b540aSrobert     case EQ : return 2;
211*404b540aSrobert     case NE : return 3;
212*404b540aSrobert     case GT : return 10;
213*404b540aSrobert     case LE : return 11;
214*404b540aSrobert     case GE : return 12;
215*404b540aSrobert     case LT : return 13;
216*404b540aSrobert     case GTU : return 14;
217*404b540aSrobert     case LEU : return 15;
218*404b540aSrobert     case LTU : return 6;
219*404b540aSrobert     case GEU : return 7;
220*404b540aSrobert     default : gcc_unreachable ();
221*404b540aSrobert     }
222*404b540aSrobert   /*NOTREACHED*/
223*404b540aSrobert   return (42);
224*404b540aSrobert }
225*404b540aSrobert 
226*404b540aSrobert /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
227*404b540aSrobert    return the mode to be used for the comparison.  */
228*404b540aSrobert 
229*404b540aSrobert enum machine_mode
230*404b540aSrobert arc_select_cc_mode (enum rtx_code op,
231*404b540aSrobert 	            rtx x ATTRIBUTE_UNUSED,
232*404b540aSrobert                     rtx y ATTRIBUTE_UNUSED)
233*404b540aSrobert {
234*404b540aSrobert   switch (op)
235*404b540aSrobert     {
236*404b540aSrobert     case EQ :
237*404b540aSrobert     case NE :
238*404b540aSrobert       return CCZNmode;
239*404b540aSrobert     default :
240*404b540aSrobert       switch (GET_CODE (x))
241*404b540aSrobert 	{
242*404b540aSrobert 	case AND :
243*404b540aSrobert 	case IOR :
244*404b540aSrobert 	case XOR :
245*404b540aSrobert 	case SIGN_EXTEND :
246*404b540aSrobert 	case ZERO_EXTEND :
247*404b540aSrobert 	  return CCZNmode;
248*404b540aSrobert 	case ASHIFT :
249*404b540aSrobert 	case ASHIFTRT :
250*404b540aSrobert 	case LSHIFTRT :
251*404b540aSrobert 	  return CCZNCmode;
252*404b540aSrobert 	default:
253*404b540aSrobert 	  break;
254*404b540aSrobert 	}
255*404b540aSrobert     }
256*404b540aSrobert   return CCmode;
257*404b540aSrobert }
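/* For example, an equality test such as (eq (reg) (reg)) only needs the
   Z and N bits and gets CCZNmode; an ordering test whose first operand is
   a shift result additionally needs the carry bit and gets CCZNCmode;
   signed orderings on arbitrary operands fall back to plain CCmode.  */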
258*404b540aSrobert 
259*404b540aSrobert /* Vectors to keep interesting information about registers where it can easily
260*404b540aSrobert    be got.  We used to use the actual mode value as the bit number, but there
261*404b540aSrobert    is (or may be) more than 32 modes now.  Instead we use two tables: one
262*404b540aSrobert    indexed by hard register number, and one indexed by mode.  */
263*404b540aSrobert 
264*404b540aSrobert /* The purpose of arc_mode_class is to shrink the range of modes so that
265*404b540aSrobert    they all fit (as bit numbers) in a 32 bit word (again).  Each real mode is
266*404b540aSrobert    mapped into one arc_mode_class mode.  */
267*404b540aSrobert 
268*404b540aSrobert enum arc_mode_class {
269*404b540aSrobert   C_MODE,
270*404b540aSrobert   S_MODE, D_MODE, T_MODE, O_MODE,
271*404b540aSrobert   SF_MODE, DF_MODE, TF_MODE, OF_MODE
272*404b540aSrobert };
273*404b540aSrobert 
274*404b540aSrobert /* Modes for condition codes.  */
275*404b540aSrobert #define C_MODES (1 << (int) C_MODE)
276*404b540aSrobert 
277*404b540aSrobert /* Modes for single-word and smaller quantities.  */
278*404b540aSrobert #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
279*404b540aSrobert 
280*404b540aSrobert /* Modes for double-word and smaller quantities.  */
281*404b540aSrobert #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
282*404b540aSrobert 
283*404b540aSrobert /* Modes for quad-word and smaller quantities.  */
284*404b540aSrobert #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
285*404b540aSrobert 
286*404b540aSrobert /* Value is 1 if register/mode pair is acceptable on arc.  */
287*404b540aSrobert 
288*404b540aSrobert const unsigned int arc_hard_regno_mode_ok[] = {
289*404b540aSrobert   T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
290*404b540aSrobert   T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
291*404b540aSrobert   T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
292*404b540aSrobert   D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
293*404b540aSrobert 
294*404b540aSrobert   /* ??? Leave these as S_MODES for now.  */
295*404b540aSrobert   S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
296*404b540aSrobert   S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
297*404b540aSrobert   S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
298*404b540aSrobert   S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
299*404b540aSrobert };
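/* Read as: registers 0-22 can hold anything up to quad-word values,
   23-24 up to double-word, 25-60 only single-word or smaller (including
   the registers covered by the ??? note above), and 61 -- the condition
   code register set up in arc_init_reg_tables below -- only MODE_CC
   values.  */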
300*404b540aSrobert 
301*404b540aSrobert unsigned int arc_mode_class [NUM_MACHINE_MODES];
302*404b540aSrobert 
303*404b540aSrobert enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
304*404b540aSrobert 
305*404b540aSrobert static void
306*404b540aSrobert arc_init_reg_tables (void)
307*404b540aSrobert {
308*404b540aSrobert   int i;
309*404b540aSrobert 
310*404b540aSrobert   for (i = 0; i < NUM_MACHINE_MODES; i++)
311*404b540aSrobert     {
312*404b540aSrobert       switch (GET_MODE_CLASS (i))
313*404b540aSrobert 	{
314*404b540aSrobert 	case MODE_INT:
315*404b540aSrobert 	case MODE_PARTIAL_INT:
316*404b540aSrobert 	case MODE_COMPLEX_INT:
317*404b540aSrobert 	  if (GET_MODE_SIZE (i) <= 4)
318*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) S_MODE;
319*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 8)
320*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) D_MODE;
321*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 16)
322*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) T_MODE;
323*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 32)
324*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) O_MODE;
325*404b540aSrobert 	  else
326*404b540aSrobert 	    arc_mode_class[i] = 0;
327*404b540aSrobert 	  break;
328*404b540aSrobert 	case MODE_FLOAT:
329*404b540aSrobert 	case MODE_COMPLEX_FLOAT:
330*404b540aSrobert 	  if (GET_MODE_SIZE (i) <= 4)
331*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) SF_MODE;
332*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 8)
333*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) DF_MODE;
334*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 16)
335*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) TF_MODE;
336*404b540aSrobert 	  else if (GET_MODE_SIZE (i) == 32)
337*404b540aSrobert 	    arc_mode_class[i] = 1 << (int) OF_MODE;
338*404b540aSrobert 	  else
339*404b540aSrobert 	    arc_mode_class[i] = 0;
340*404b540aSrobert 	  break;
341*404b540aSrobert 	case MODE_CC:
342*404b540aSrobert 	  arc_mode_class[i] = 1 << (int) C_MODE;
343*404b540aSrobert 	  break;
344*404b540aSrobert 	default:
345*404b540aSrobert 	  arc_mode_class[i] = 0;
346*404b540aSrobert 	  break;
347*404b540aSrobert 	}
348*404b540aSrobert     }
349*404b540aSrobert 
350*404b540aSrobert   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
351*404b540aSrobert     {
352*404b540aSrobert       if (i < 60)
353*404b540aSrobert 	arc_regno_reg_class[i] = GENERAL_REGS;
354*404b540aSrobert       else if (i == 60)
355*404b540aSrobert 	arc_regno_reg_class[i] = LPCOUNT_REG;
356*404b540aSrobert       else if (i == 61)
357*404b540aSrobert 	arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
358*404b540aSrobert       else
359*404b540aSrobert 	arc_regno_reg_class[i] = NO_REGS;
360*404b540aSrobert     }
361*404b540aSrobert }
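/* So, for instance, SImode (4-byte MODE_INT) maps to the S_MODE bit,
   DImode to D_MODE, SFmode to SF_MODE, DFmode to DF_MODE and every
   MODE_CC variant to C_MODE; presumably HARD_REGNO_MODE_OK in arc.h can
   then just AND arc_mode_class[mode] with arc_hard_regno_mode_ok[regno].  */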
362*404b540aSrobert 
363*404b540aSrobert /* ARC specific attribute support.
364*404b540aSrobert 
365*404b540aSrobert    The ARC has these attributes:
366*404b540aSrobert    interrupt - for interrupt functions
367*404b540aSrobert */
368*404b540aSrobert 
369*404b540aSrobert const struct attribute_spec arc_attribute_table[] =
370*404b540aSrobert {
371*404b540aSrobert   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
372*404b540aSrobert   { "interrupt", 1, 1, true,  false, false, arc_handle_interrupt_attribute },
373*404b540aSrobert   { NULL,        0, 0, false, false, false, NULL }
374*404b540aSrobert };
375*404b540aSrobert 
376*404b540aSrobert /* Handle an "interrupt" attribute; arguments as in
377*404b540aSrobert    struct attribute_spec.handler.  */
378*404b540aSrobert static tree
379*404b540aSrobert arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
380*404b540aSrobert                                 tree name,
381*404b540aSrobert                                 tree args,
382*404b540aSrobert                                 int flags ATTRIBUTE_UNUSED,
383*404b540aSrobert                                 bool *no_add_attrs)
384*404b540aSrobert {
385*404b540aSrobert   tree value = TREE_VALUE (args);
386*404b540aSrobert 
387*404b540aSrobert   if (TREE_CODE (value) != STRING_CST)
388*404b540aSrobert     {
389*404b540aSrobert       warning (OPT_Wattributes,
390*404b540aSrobert 	       "argument of %qs attribute is not a string constant",
391*404b540aSrobert 	       IDENTIFIER_POINTER (name));
392*404b540aSrobert       *no_add_attrs = true;
393*404b540aSrobert     }
394*404b540aSrobert   else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
395*404b540aSrobert 	   && strcmp (TREE_STRING_POINTER (value), "ilink2"))
396*404b540aSrobert     {
397*404b540aSrobert       warning (OPT_Wattributes,
398*404b540aSrobert 	       "argument of %qs attribute is not \"ilink1\" or \"ilink2\"",
399*404b540aSrobert 	       IDENTIFIER_POINTER (name));
400*404b540aSrobert       *no_add_attrs = true;
401*404b540aSrobert     }
402*404b540aSrobert 
403*404b540aSrobert   return NULL_TREE;
404*404b540aSrobert }
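/* In user code the attribute appears on the handler's declaration, e.g.
   (a minimal sketch):

     void timer_isr (void) __attribute__ ((interrupt ("ilink1")));

   Any argument other than "ilink1" or "ilink2" draws the warning above
   and the attribute is dropped.  */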
405*404b540aSrobert 
406*404b540aSrobert 
407*404b540aSrobert /* Acceptable arguments to the call insn.  */
408*404b540aSrobert 
409*404b540aSrobert int
410*404b540aSrobert call_address_operand (rtx op, enum machine_mode mode)
411*404b540aSrobert {
412*404b540aSrobert   return (symbolic_operand (op, mode)
413*404b540aSrobert 	  || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
414*404b540aSrobert 	  || (GET_CODE (op) == REG));
415*404b540aSrobert }
416*404b540aSrobert 
417*404b540aSrobert int
418*404b540aSrobert call_operand (rtx op, enum machine_mode mode)
419*404b540aSrobert {
420*404b540aSrobert   if (GET_CODE (op) != MEM)
421*404b540aSrobert     return 0;
422*404b540aSrobert   op = XEXP (op, 0);
423*404b540aSrobert   return call_address_operand (op, mode);
424*404b540aSrobert }
425*404b540aSrobert 
426*404b540aSrobert /* Returns 1 if OP is a symbol reference.  */
427*404b540aSrobert 
428*404b540aSrobert int
429*404b540aSrobert symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
430*404b540aSrobert {
431*404b540aSrobert   switch (GET_CODE (op))
432*404b540aSrobert     {
433*404b540aSrobert     case SYMBOL_REF:
434*404b540aSrobert     case LABEL_REF:
435*404b540aSrobert     case CONST :
436*404b540aSrobert       return 1;
437*404b540aSrobert     default:
438*404b540aSrobert       return 0;
439*404b540aSrobert     }
440*404b540aSrobert }
441*404b540aSrobert 
442*404b540aSrobert /* Return truth value of statement that OP is a symbolic memory
443*404b540aSrobert    operand of mode MODE.  */
444*404b540aSrobert 
445*404b540aSrobert int
446*404b540aSrobert symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
447*404b540aSrobert {
448*404b540aSrobert   if (GET_CODE (op) == SUBREG)
449*404b540aSrobert     op = SUBREG_REG (op);
450*404b540aSrobert   if (GET_CODE (op) != MEM)
451*404b540aSrobert     return 0;
452*404b540aSrobert   op = XEXP (op, 0);
453*404b540aSrobert   return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
454*404b540aSrobert 	  || GET_CODE (op) == LABEL_REF);
455*404b540aSrobert }
456*404b540aSrobert 
457*404b540aSrobert /* Return true if OP is a short immediate (shimm) value.  */
458*404b540aSrobert 
459*404b540aSrobert int
460*404b540aSrobert short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
461*404b540aSrobert {
462*404b540aSrobert   if (GET_CODE (op) != CONST_INT)
463*404b540aSrobert     return 0;
464*404b540aSrobert   return SMALL_INT (INTVAL (op));
465*404b540aSrobert }
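/* SMALL_INT comes from arc.h and tests whether the value fits the short
   immediate field of an ARC instruction (a small signed range, roughly
   -256..255); larger constants need the 32 bit limm handled below.  */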
466*404b540aSrobert 
467*404b540aSrobert /* Return true if OP will require a long immediate (limm) value.
468*404b540aSrobert    This is currently only used when calculating length attributes.  */
469*404b540aSrobert 
470*404b540aSrobert int
471*404b540aSrobert long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
472*404b540aSrobert {
473*404b540aSrobert   switch (GET_CODE (op))
474*404b540aSrobert     {
475*404b540aSrobert     case SYMBOL_REF :
476*404b540aSrobert     case LABEL_REF :
477*404b540aSrobert     case CONST :
478*404b540aSrobert       return 1;
479*404b540aSrobert     case CONST_INT :
480*404b540aSrobert       return !SMALL_INT (INTVAL (op));
481*404b540aSrobert     case CONST_DOUBLE :
482*404b540aSrobert       /* These can happen because large unsigned 32 bit constants are
483*404b540aSrobert 	 represented this way (the multiplication patterns can cause these
484*404b540aSrobert 	 to be generated).  They also occur for SFmode values.  */
485*404b540aSrobert       return 1;
486*404b540aSrobert     default:
487*404b540aSrobert       break;
488*404b540aSrobert     }
489*404b540aSrobert   return 0;
490*404b540aSrobert }
491*404b540aSrobert 
492*404b540aSrobert /* Return true if OP is a MEM that when used as a load or store address will
493*404b540aSrobert    require an 8 byte insn.
494*404b540aSrobert    Load and store instructions don't allow the same possibilities but they're
495*404b540aSrobert    similar enough that this one function will do.
496*404b540aSrobert    This is currently only used when calculating length attributes.  */
497*404b540aSrobert 
498*404b540aSrobert int
499*404b540aSrobert long_immediate_loadstore_operand (rtx op,
500*404b540aSrobert                                   enum machine_mode mode ATTRIBUTE_UNUSED)
501*404b540aSrobert {
502*404b540aSrobert   if (GET_CODE (op) != MEM)
503*404b540aSrobert     return 0;
504*404b540aSrobert 
505*404b540aSrobert   op = XEXP (op, 0);
506*404b540aSrobert   switch (GET_CODE (op))
507*404b540aSrobert     {
508*404b540aSrobert     case SYMBOL_REF :
509*404b540aSrobert     case LABEL_REF :
510*404b540aSrobert     case CONST :
511*404b540aSrobert       return 1;
512*404b540aSrobert     case CONST_INT :
513*404b540aSrobert       /* This must be handled as "st c,[limm]".  Ditto for load.
514*404b540aSrobert 	 Technically, the assembler could translate some possibilities to
515*404b540aSrobert 	 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
516*404b540aSrobert 	 assume that it does.  */
517*404b540aSrobert       return 1;
518*404b540aSrobert     case CONST_DOUBLE :
519*404b540aSrobert       /* These can happen because large unsigned 32 bit constants are
520*404b540aSrobert 	 represented this way (the multiplication patterns can cause these
521*404b540aSrobert 	 to be generated).  They also occur for SFmode values.  */
522*404b540aSrobert       return 1;
523*404b540aSrobert     case REG :
524*404b540aSrobert       return 0;
525*404b540aSrobert     case PLUS :
526*404b540aSrobert       if (GET_CODE (XEXP (op, 1)) == CONST_INT
527*404b540aSrobert 	  && !SMALL_INT (INTVAL (XEXP (op, 1))))
528*404b540aSrobert 	return 1;
529*404b540aSrobert       return 0;
530*404b540aSrobert     default:
531*404b540aSrobert       break;
532*404b540aSrobert     }
533*404b540aSrobert   return 0;
534*404b540aSrobert }
535*404b540aSrobert 
536*404b540aSrobert /* Return true if OP is an acceptable argument for a single word
537*404b540aSrobert    move source.  */
538*404b540aSrobert 
539*404b540aSrobert int
540*404b540aSrobert move_src_operand (rtx op, enum machine_mode mode)
541*404b540aSrobert {
542*404b540aSrobert   switch (GET_CODE (op))
543*404b540aSrobert     {
544*404b540aSrobert     case SYMBOL_REF :
545*404b540aSrobert     case LABEL_REF :
546*404b540aSrobert     case CONST :
547*404b540aSrobert       return 1;
548*404b540aSrobert     case CONST_INT :
549*404b540aSrobert       return (LARGE_INT (INTVAL (op)));
550*404b540aSrobert     case CONST_DOUBLE :
551*404b540aSrobert       /* We can handle DImode integer constants in SImode if the value
552*404b540aSrobert 	 (signed or unsigned) will fit in 32 bits.  This is needed because
553*404b540aSrobert 	 large unsigned 32 bit constants are represented as CONST_DOUBLEs.  */
554*404b540aSrobert       if (mode == SImode)
555*404b540aSrobert 	return arc_double_limm_p (op);
556*404b540aSrobert       /* We can handle 32 bit floating point constants.  */
557*404b540aSrobert       if (mode == SFmode)
558*404b540aSrobert 	return GET_MODE (op) == SFmode;
559*404b540aSrobert       return 0;
560*404b540aSrobert     case REG :
561*404b540aSrobert       return register_operand (op, mode);
562*404b540aSrobert     case SUBREG :
563*404b540aSrobert       /* (subreg (mem ...) ...) can occur here if the inner part was once a
564*404b540aSrobert 	 pseudo-reg and is now a stack slot.  */
565*404b540aSrobert       if (GET_CODE (SUBREG_REG (op)) == MEM)
566*404b540aSrobert 	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
567*404b540aSrobert       else
568*404b540aSrobert 	return register_operand (op, mode);
569*404b540aSrobert     case MEM :
570*404b540aSrobert       return address_operand (XEXP (op, 0), mode);
571*404b540aSrobert     default :
572*404b540aSrobert       return 0;
573*404b540aSrobert     }
574*404b540aSrobert }
575*404b540aSrobert 
576*404b540aSrobert /* Return true if OP is an acceptable argument for a double word
577*404b540aSrobert    move source.  */
578*404b540aSrobert 
579*404b540aSrobert int
580*404b540aSrobert move_double_src_operand (rtx op, enum machine_mode mode)
581*404b540aSrobert {
582*404b540aSrobert   switch (GET_CODE (op))
583*404b540aSrobert     {
584*404b540aSrobert     case REG :
585*404b540aSrobert       return register_operand (op, mode);
586*404b540aSrobert     case SUBREG :
587*404b540aSrobert       /* (subreg (mem ...) ...) can occur here if the inner part was once a
588*404b540aSrobert 	 pseudo-reg and is now a stack slot.  */
589*404b540aSrobert       if (GET_CODE (SUBREG_REG (op)) == MEM)
590*404b540aSrobert 	return move_double_src_operand (SUBREG_REG (op), mode);
591*404b540aSrobert       else
592*404b540aSrobert 	return register_operand (op, mode);
593*404b540aSrobert     case MEM :
594*404b540aSrobert       /* Disallow auto inc/dec for now.  */
595*404b540aSrobert       if (GET_CODE (XEXP (op, 0)) == PRE_DEC
596*404b540aSrobert 	  || GET_CODE (XEXP (op, 0)) == PRE_INC)
597*404b540aSrobert 	return 0;
598*404b540aSrobert       return address_operand (XEXP (op, 0), mode);
599*404b540aSrobert     case CONST_INT :
600*404b540aSrobert     case CONST_DOUBLE :
601*404b540aSrobert       return 1;
602*404b540aSrobert     default :
603*404b540aSrobert       return 0;
604*404b540aSrobert     }
605*404b540aSrobert }
606*404b540aSrobert 
607*404b540aSrobert /* Return true if OP is an acceptable argument for a move destination.  */
608*404b540aSrobert 
609*404b540aSrobert int
610*404b540aSrobert move_dest_operand (rtx op, enum machine_mode mode)
611*404b540aSrobert {
612*404b540aSrobert   switch (GET_CODE (op))
613*404b540aSrobert     {
614*404b540aSrobert     case REG :
615*404b540aSrobert       return register_operand (op, mode);
616*404b540aSrobert     case SUBREG :
617*404b540aSrobert       /* (subreg (mem ...) ...) can occur here if the inner part was once a
618*404b540aSrobert 	 pseudo-reg and is now a stack slot.  */
619*404b540aSrobert       if (GET_CODE (SUBREG_REG (op)) == MEM)
620*404b540aSrobert 	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
621*404b540aSrobert       else
622*404b540aSrobert 	return register_operand (op, mode);
623*404b540aSrobert     case MEM :
624*404b540aSrobert       return address_operand (XEXP (op, 0), mode);
625*404b540aSrobert     default :
626*404b540aSrobert       return 0;
627*404b540aSrobert     }
628*404b540aSrobert }
629*404b540aSrobert 
630*404b540aSrobert /* Return true if OP is valid load with update operand.  */
631*404b540aSrobert 
632*404b540aSrobert int
633*404b540aSrobert load_update_operand (rtx op, enum machine_mode mode)
634*404b540aSrobert {
635*404b540aSrobert   if (GET_CODE (op) != MEM
636*404b540aSrobert       || GET_MODE (op) != mode)
637*404b540aSrobert     return 0;
638*404b540aSrobert   op = XEXP (op, 0);
639*404b540aSrobert   if (GET_CODE (op) != PLUS
640*404b540aSrobert       || GET_MODE (op) != Pmode
641*404b540aSrobert       || !register_operand (XEXP (op, 0), Pmode)
642*404b540aSrobert       || !nonmemory_operand (XEXP (op, 1), Pmode))
643*404b540aSrobert     return 0;
644*404b540aSrobert   return 1;
645*404b540aSrobert }
646*404b540aSrobert 
647*404b540aSrobert /* Return true if OP is valid store with update operand.  */
648*404b540aSrobert 
649*404b540aSrobert int
650*404b540aSrobert store_update_operand (rtx op, enum machine_mode mode)
651*404b540aSrobert {
652*404b540aSrobert   if (GET_CODE (op) != MEM
653*404b540aSrobert       || GET_MODE (op) != mode)
654*404b540aSrobert     return 0;
655*404b540aSrobert   op = XEXP (op, 0);
656*404b540aSrobert   if (GET_CODE (op) != PLUS
657*404b540aSrobert       || GET_MODE (op) != Pmode
658*404b540aSrobert       || !register_operand (XEXP (op, 0), Pmode)
659*404b540aSrobert       || !(GET_CODE (XEXP (op, 1)) == CONST_INT
660*404b540aSrobert 	   && SMALL_INT (INTVAL (XEXP (op, 1)))))
661*404b540aSrobert     return 0;
662*404b540aSrobert   return 1;
663*404b540aSrobert }
664*404b540aSrobert 
665*404b540aSrobert /* Return true if OP is a non-volatile non-immediate operand.
666*404b540aSrobert    Volatile memory refs require a special "cache-bypass" instruction
667*404b540aSrobert    and only the standard movXX patterns are set up to handle them.  */
668*404b540aSrobert 
669*404b540aSrobert int
670*404b540aSrobert nonvol_nonimm_operand (rtx op, enum machine_mode mode)
671*404b540aSrobert {
672*404b540aSrobert   if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
673*404b540aSrobert     return 0;
674*404b540aSrobert   return nonimmediate_operand (op, mode);
675*404b540aSrobert }
676*404b540aSrobert 
677*404b540aSrobert /* Accept integer operands in the range -0x80000000..0x7fffffff.  We have
678*404b540aSrobert    to check the range carefully since this predicate is used in DImode
679*404b540aSrobert    contexts.  */
680*404b540aSrobert 
681*404b540aSrobert int
682*404b540aSrobert const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
683*404b540aSrobert {
684*404b540aSrobert   /* All allowed constants will fit a CONST_INT.  */
685*404b540aSrobert   return (GET_CODE (op) == CONST_INT
686*404b540aSrobert 	  && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
687*404b540aSrobert }
688*404b540aSrobert 
689*404b540aSrobert /* Accept integer operands in the range 0..0xffffffff.  We have to check the
690*404b540aSrobert    range carefully since this predicate is used in DImode contexts.  Also, we
691*404b540aSrobert    need some extra crud to make it work when hosted on 64-bit machines.  */
692*404b540aSrobert 
693*404b540aSrobert int
694*404b540aSrobert const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
695*404b540aSrobert {
696*404b540aSrobert #if HOST_BITS_PER_WIDE_INT > 32
697*404b540aSrobert   /* All allowed constants will fit a CONST_INT.  */
698*404b540aSrobert   return (GET_CODE (op) == CONST_INT
699*404b540aSrobert 	  && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
700*404b540aSrobert #else
701*404b540aSrobert   return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
702*404b540aSrobert 	  || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
703*404b540aSrobert #endif
704*404b540aSrobert }
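/* On a host where HOST_WIDE_INT is wider than 32 bits every such value
   fits in a CONST_INT and the simple range check suffices.  On a 32 bit
   host a constant like 0x80000000 is negative as a CONST_INT, so it is
   instead represented as a CONST_DOUBLE with a zero high word, which the
   second arm accepts.  */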
705*404b540aSrobert 
706*404b540aSrobert /* Return 1 if OP is a comparison operator valid for the mode of CC.
707*404b540aSrobert    This allows the use of MATCH_OPERATOR to recognize all the branch insns.
708*404b540aSrobert 
709*404b540aSrobert    Some insns only set a few bits in the condition code.  So only allow those
710*404b540aSrobert    comparisons that use the bits that are valid.  */
711*404b540aSrobert 
712*404b540aSrobert int
713*404b540aSrobert proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
714*404b540aSrobert {
715*404b540aSrobert   enum rtx_code code;
716*404b540aSrobert   if (!COMPARISON_P (op))
717*404b540aSrobert     return 0;
718*404b540aSrobert 
719*404b540aSrobert   code = GET_CODE (op);
720*404b540aSrobert   if (GET_MODE (XEXP (op, 0)) == CCZNmode)
721*404b540aSrobert     return (code == EQ || code == NE);
722*404b540aSrobert   if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
723*404b540aSrobert     return (code == EQ || code == NE
724*404b540aSrobert 	    || code == LTU || code == GEU || code == GTU || code == LEU);
725*404b540aSrobert   return 1;
726*404b540aSrobert }
727*404b540aSrobert 
728*404b540aSrobert /* Misc. utilities.  */
729*404b540aSrobert 
730*404b540aSrobert /* X and Y are two things to compare using CODE.  Emit the compare insn and
731*404b540aSrobert    return the rtx for the cc reg in the proper mode.  */
732*404b540aSrobert 
733*404b540aSrobert rtx
734*404b540aSrobert gen_compare_reg (enum rtx_code code, rtx x, rtx y)
735*404b540aSrobert {
736*404b540aSrobert   enum machine_mode mode = SELECT_CC_MODE (code, x, y);
737*404b540aSrobert   rtx cc_reg;
738*404b540aSrobert 
739*404b540aSrobert   cc_reg = gen_rtx_REG (mode, 61);
740*404b540aSrobert 
741*404b540aSrobert   emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
742*404b540aSrobert 			  gen_rtx_COMPARE (mode, x, y)));
743*404b540aSrobert 
744*404b540aSrobert   return cc_reg;
745*404b540aSrobert }
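/* For example, gen_compare_reg (EQ, x, y) selects CCZNmode and emits

     (set (reg:CCZN 61) (compare:CCZN x y))

   the caller then wraps the returned cc register in the branch or scc
   pattern it is building.  */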
746*404b540aSrobert 
747*404b540aSrobert /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
748*404b540aSrobert    We assume the value can be either signed or unsigned.  */
749*404b540aSrobert 
750*404b540aSrobert int
751*404b540aSrobert arc_double_limm_p (rtx value)
752*404b540aSrobert {
753*404b540aSrobert   HOST_WIDE_INT low, high;
754*404b540aSrobert 
755*404b540aSrobert   gcc_assert (GET_CODE (value) == CONST_DOUBLE);
756*404b540aSrobert 
757*404b540aSrobert   low = CONST_DOUBLE_LOW (value);
758*404b540aSrobert   high = CONST_DOUBLE_HIGH (value);
759*404b540aSrobert 
760*404b540aSrobert   if (low & 0x80000000)
761*404b540aSrobert     {
762*404b540aSrobert       return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
763*404b540aSrobert 	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
764*404b540aSrobert 		   == - (unsigned HOST_WIDE_INT) 0x80000000)
765*404b540aSrobert 		  && high == -1));
766*404b540aSrobert     }
767*404b540aSrobert   else
768*404b540aSrobert     {
769*404b540aSrobert       return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
770*404b540aSrobert     }
771*404b540aSrobert }
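/* Examples: (high 0, low 0x7fffffff) and the unsigned value
   (high 0, low 0xdeadbeef) both fit a limm, as does a negative value
   whose low word has bit 31 set and whose high word is -1; anything with
   more than 32 significant bits, say high == 5, is rejected.  */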
772*404b540aSrobert 
773*404b540aSrobert /* Do any needed setup for a variadic function.  For the ARC, we must
774*404b540aSrobert    create a register parameter block, and then copy any anonymous arguments
775*404b540aSrobert    in registers to memory.
776*404b540aSrobert 
777*404b540aSrobert    CUM has not been updated for the last named argument which has type TYPE
778*404b540aSrobert    and mode MODE, and we rely on this fact.
779*404b540aSrobert 
780*404b540aSrobert    We do things a little weird here.  We're supposed to only allocate space
781*404b540aSrobert    for the anonymous arguments.  However we need to keep the stack eight byte
782*404b540aSrobert    aligned.  So we round the space up if necessary, and leave it to va_start
783*404b540aSrobert    to compensate.  */
784*404b540aSrobert 
785*404b540aSrobert static void
786*404b540aSrobert arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
787*404b540aSrobert                             enum machine_mode mode,
788*404b540aSrobert                             tree type ATTRIBUTE_UNUSED,
789*404b540aSrobert                             int *pretend_size,
790*404b540aSrobert                             int no_rtl)
791*404b540aSrobert {
792*404b540aSrobert   int first_anon_arg;
793*404b540aSrobert 
794*404b540aSrobert   /* All BLKmode values are passed by reference.  */
795*404b540aSrobert   gcc_assert (mode != BLKmode);
796*404b540aSrobert 
797*404b540aSrobert   first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
798*404b540aSrobert 			   / UNITS_PER_WORD);
799*404b540aSrobert 
800*404b540aSrobert   if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
801*404b540aSrobert     {
802*404b540aSrobert       /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
803*404b540aSrobert       int first_reg_offset = first_anon_arg;
804*404b540aSrobert       /* Size in words to "pretend" allocate.  */
805*404b540aSrobert       int size = MAX_ARC_PARM_REGS - first_reg_offset;
806*404b540aSrobert       /* Extra slop to keep stack eight byte aligned.  */
807*404b540aSrobert       int align_slop = size & 1;
808*404b540aSrobert       rtx regblock;
809*404b540aSrobert 
810*404b540aSrobert       regblock = gen_rtx_MEM (BLKmode,
811*404b540aSrobert 			      plus_constant (arg_pointer_rtx,
812*404b540aSrobert 					     FIRST_PARM_OFFSET (0)
813*404b540aSrobert 					     + align_slop * UNITS_PER_WORD));
814*404b540aSrobert       set_mem_alias_set (regblock, get_varargs_alias_set ());
815*404b540aSrobert       set_mem_align (regblock, BITS_PER_WORD);
816*404b540aSrobert       move_block_from_reg (first_reg_offset, regblock,
817*404b540aSrobert 			   MAX_ARC_PARM_REGS - first_reg_offset);
818*404b540aSrobert 
819*404b540aSrobert       *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
820*404b540aSrobert 		       * UNITS_PER_WORD);
821*404b540aSrobert     }
822*404b540aSrobert }
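/* A worked example, assuming MAX_ARC_PARM_REGS is 8 and UNITS_PER_WORD
   is 4: with three named argument words, first_anon_arg is 3, size is 5
   registers, align_slop is 1 to keep the block 8 byte aligned, registers
   3..7 are stored just past that one word of padding and *pretend_size
   becomes 6 words (24 bytes).  */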
823*404b540aSrobert 
824*404b540aSrobert /* Cost functions.  */
825*404b540aSrobert 
826*404b540aSrobert /* Compute a (partial) cost for rtx X.  Return true if the complete
827*404b540aSrobert    cost has been computed, and false if subexpressions should be
828*404b540aSrobert    scanned.  In either case, *TOTAL contains the cost result.  */
829*404b540aSrobert 
830*404b540aSrobert static bool
831*404b540aSrobert arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
832*404b540aSrobert {
833*404b540aSrobert   switch (code)
834*404b540aSrobert     {
835*404b540aSrobert       /* Small integers are as cheap as registers.  4 byte values can
836*404b540aSrobert 	 be fetched as immediate constants - let's give that the cost
837*404b540aSrobert 	 of an extra insn.  */
838*404b540aSrobert     case CONST_INT:
839*404b540aSrobert       if (SMALL_INT (INTVAL (x)))
840*404b540aSrobert 	{
841*404b540aSrobert 	  *total = 0;
842*404b540aSrobert 	  return true;
843*404b540aSrobert 	}
844*404b540aSrobert       /* FALLTHRU */
845*404b540aSrobert 
846*404b540aSrobert     case CONST:
847*404b540aSrobert     case LABEL_REF:
848*404b540aSrobert     case SYMBOL_REF:
849*404b540aSrobert       *total = COSTS_N_INSNS (1);
850*404b540aSrobert       return true;
851*404b540aSrobert 
852*404b540aSrobert     case CONST_DOUBLE:
853*404b540aSrobert       {
854*404b540aSrobert         rtx high, low;
855*404b540aSrobert         split_double (x, &high, &low);
856*404b540aSrobert 	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
857*404b540aSrobert 				+ !SMALL_INT (INTVAL (low)));
858*404b540aSrobert 	return true;
859*404b540aSrobert       }
860*404b540aSrobert 
861*404b540aSrobert     /* Encourage synth_mult to find a synthetic multiply when reasonable.
862*404b540aSrobert        If we need more than 12 insns to do a multiply, then go out-of-line,
863*404b540aSrobert        since the call overhead will be < 10% of the cost of the multiply.  */
864*404b540aSrobert     case ASHIFT:
865*404b540aSrobert     case ASHIFTRT:
866*404b540aSrobert     case LSHIFTRT:
867*404b540aSrobert       if (TARGET_SHIFTER)
868*404b540aSrobert         *total = COSTS_N_INSNS (1);
869*404b540aSrobert       else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
870*404b540aSrobert         *total = COSTS_N_INSNS (16);
871*404b540aSrobert       else
872*404b540aSrobert         *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
873*404b540aSrobert       return false;
874*404b540aSrobert 
875*404b540aSrobert     default:
876*404b540aSrobert       return false;
877*404b540aSrobert     }
878*404b540aSrobert }
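/* A variable shift without TARGET_SHIFTER has to be expanded as a loop,
   hence the pessimistic 16 insn cost; a shift by a constant N is costed
   as N single-bit shifts, which feeds the synth_mult heuristic described
   above.  */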
879*404b540aSrobert 
880*404b540aSrobert 
881*404b540aSrobert /* Provide the costs of an addressing mode that contains ADDR.
882*404b540aSrobert    If ADDR is not a valid address, its cost is irrelevant.  */
883*404b540aSrobert 
884*404b540aSrobert static int
885*404b540aSrobert arc_address_cost (rtx addr)
886*404b540aSrobert {
887*404b540aSrobert   switch (GET_CODE (addr))
888*404b540aSrobert     {
889*404b540aSrobert     case REG :
890*404b540aSrobert       return 1;
891*404b540aSrobert 
892*404b540aSrobert     case LABEL_REF :
893*404b540aSrobert     case SYMBOL_REF :
894*404b540aSrobert     case CONST :
895*404b540aSrobert       return 2;
896*404b540aSrobert 
897*404b540aSrobert     case PLUS :
898*404b540aSrobert       {
899*404b540aSrobert 	register rtx plus0 = XEXP (addr, 0);
900*404b540aSrobert 	register rtx plus1 = XEXP (addr, 1);
901*404b540aSrobert 
902*404b540aSrobert 	if (GET_CODE (plus0) != REG)
903*404b540aSrobert 	  break;
904*404b540aSrobert 
905*404b540aSrobert 	switch (GET_CODE (plus1))
906*404b540aSrobert 	  {
907*404b540aSrobert 	  case CONST_INT :
908*404b540aSrobert 	    return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
909*404b540aSrobert 	  case CONST :
910*404b540aSrobert 	  case SYMBOL_REF :
911*404b540aSrobert 	  case LABEL_REF :
912*404b540aSrobert 	    return 2;
913*404b540aSrobert 	  default:
914*404b540aSrobert 	    break;
915*404b540aSrobert 	  }
916*404b540aSrobert 	break;
917*404b540aSrobert       }
918*404b540aSrobert     default:
919*404b540aSrobert       break;
920*404b540aSrobert     }
921*404b540aSrobert 
922*404b540aSrobert   return 4;
923*404b540aSrobert }
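/* Examples: [r1] costs 1; [r1,8] costs 1 because the offset fits in a
   short immediate; [r1,0x1000] or [r1,symbol] costs 2 since the offset
   needs a limm; everything else gets the catch-all cost of 4.  */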
924*404b540aSrobert 
925*404b540aSrobert /* Function prologue/epilogue handlers.  */
926*404b540aSrobert 
927*404b540aSrobert /* ARC stack frames look like:
928*404b540aSrobert 
929*404b540aSrobert              Before call                       After call
930*404b540aSrobert         +-----------------------+       +-----------------------+
931*404b540aSrobert         |                       |       |                       |
932*404b540aSrobert    high |  local variables,     |       |  local variables,     |
933*404b540aSrobert    mem  |  reg save area, etc.  |       |  reg save area, etc.  |
934*404b540aSrobert         |                       |       |                       |
935*404b540aSrobert         +-----------------------+       +-----------------------+
936*404b540aSrobert         |                       |       |                       |
937*404b540aSrobert         |  arguments on stack.  |       |  arguments on stack.  |
938*404b540aSrobert         |                       |       |                       |
939*404b540aSrobert  SP+16->+-----------------------+FP+48->+-----------------------+
940*404b540aSrobert         | 4 word save area for  |       |  reg parm save area,  |
941*404b540aSrobert         | return addr, prev %fp |       |  only created for     |
942*404b540aSrobert   SP+0->+-----------------------+       |  variable argument    |
943*404b540aSrobert                                         |  functions            |
944*404b540aSrobert                                  FP+16->+-----------------------+
945*404b540aSrobert                                         | 4 word save area for  |
946*404b540aSrobert                                         | return addr, prev %fp |
947*404b540aSrobert                                   FP+0->+-----------------------+
948*404b540aSrobert                                         |                       |
949*404b540aSrobert                                         |  local variables      |
950*404b540aSrobert                                         |                       |
951*404b540aSrobert                                         +-----------------------+
952*404b540aSrobert                                         |                       |
953*404b540aSrobert                                         |  register save area   |
954*404b540aSrobert                                         |                       |
955*404b540aSrobert                                         +-----------------------+
956*404b540aSrobert                                         |                       |
957*404b540aSrobert                                         |  alloca allocations   |
958*404b540aSrobert                                         |                       |
959*404b540aSrobert                                         +-----------------------+
960*404b540aSrobert                                         |                       |
961*404b540aSrobert                                         |  arguments on stack   |
962*404b540aSrobert                                         |                       |
963*404b540aSrobert                                  SP+16->+-----------------------+
964*404b540aSrobert    low                                  | 4 word save area for  |
965*404b540aSrobert    memory                               | return addr, prev %fp |
966*404b540aSrobert                                   SP+0->+-----------------------+
967*404b540aSrobert 
968*404b540aSrobert Notes:
969*404b540aSrobert 1) The "reg parm save area" does not exist for non variable argument fns.
970*404b540aSrobert    The "reg parm save area" can be eliminated completely if we created our
971*404b540aSrobert    own va-arc.h, but that has tradeoffs as well (so it's not done).  */
972*404b540aSrobert 
973*404b540aSrobert /* Structure to be filled in by arc_compute_frame_size with register
974*404b540aSrobert    save masks, and offsets for the current function.  */
975*404b540aSrobert struct arc_frame_info
976*404b540aSrobert {
977*404b540aSrobert   unsigned int total_size;	/* # bytes that the entire frame takes up.  */
978*404b540aSrobert   unsigned int extra_size;	/* # bytes of extra stuff.  */
979*404b540aSrobert   unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
980*404b540aSrobert   unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
981*404b540aSrobert   unsigned int reg_size;	/* # bytes needed to store regs.  */
982*404b540aSrobert   unsigned int var_size;	/* # bytes that variables take up.  */
983*404b540aSrobert   unsigned int reg_offset;	/* Offset from new sp to store regs.  */
984*404b540aSrobert   unsigned int gmask;		/* Mask of saved gp registers.  */
985*404b540aSrobert   int          initialized;	/* Nonzero if frame size already calculated.  */
986*404b540aSrobert };
987*404b540aSrobert 
988*404b540aSrobert /* Current frame information calculated by arc_compute_frame_size.  */
989*404b540aSrobert static struct arc_frame_info current_frame_info;
990*404b540aSrobert 
991*404b540aSrobert /* Zero structure to initialize current_frame_info.  */
992*404b540aSrobert static struct arc_frame_info zero_frame_info;
993*404b540aSrobert 
994*404b540aSrobert /* Type of function DECL.
995*404b540aSrobert 
996*404b540aSrobert    The result is cached.  To reset the cache at the end of a function,
997*404b540aSrobert    call with DECL = NULL_TREE.  */
998*404b540aSrobert 
999*404b540aSrobert enum arc_function_type
1000*404b540aSrobert arc_compute_function_type (tree decl)
1001*404b540aSrobert {
1002*404b540aSrobert   tree a;
1003*404b540aSrobert   /* Cached value.  */
1004*404b540aSrobert   static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1005*404b540aSrobert   /* Last function we were called for.  */
1006*404b540aSrobert   static tree last_fn = NULL_TREE;
1007*404b540aSrobert 
1008*404b540aSrobert   /* Resetting the cached value?  */
1009*404b540aSrobert   if (decl == NULL_TREE)
1010*404b540aSrobert     {
1011*404b540aSrobert       fn_type = ARC_FUNCTION_UNKNOWN;
1012*404b540aSrobert       last_fn = NULL_TREE;
1013*404b540aSrobert       return fn_type;
1014*404b540aSrobert     }
1015*404b540aSrobert 
1016*404b540aSrobert   if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1017*404b540aSrobert     return fn_type;
1018*404b540aSrobert 
1019*404b540aSrobert   /* Assume we have a normal function (not an interrupt handler).  */
1020*404b540aSrobert   fn_type = ARC_FUNCTION_NORMAL;
1021*404b540aSrobert 
1022*404b540aSrobert   /* Now see if this is an interrupt handler.  */
1023*404b540aSrobert   for (a = DECL_ATTRIBUTES (current_function_decl);
1024*404b540aSrobert        a;
1025*404b540aSrobert        a = TREE_CHAIN (a))
1026*404b540aSrobert     {
1027*404b540aSrobert       tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1028*404b540aSrobert 
1029*404b540aSrobert       if (name == get_identifier ("__interrupt__")
1030*404b540aSrobert 	  && list_length (args) == 1
1031*404b540aSrobert 	  && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1032*404b540aSrobert 	{
1033*404b540aSrobert 	  tree value = TREE_VALUE (args);
1034*404b540aSrobert 
1035*404b540aSrobert 	  if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1036*404b540aSrobert 	    fn_type = ARC_FUNCTION_ILINK1;
1037*404b540aSrobert 	  else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1038*404b540aSrobert 	    fn_type = ARC_FUNCTION_ILINK2;
1039*404b540aSrobert 	  else
1040*404b540aSrobert 	    gcc_unreachable ();
1041*404b540aSrobert 	  break;
1042*404b540aSrobert 	}
1043*404b540aSrobert     }
1044*404b540aSrobert 
1045*404b540aSrobert   last_fn = decl;
1046*404b540aSrobert   return fn_type;
1047*404b540aSrobert }
1048*404b540aSrobert 
1049*404b540aSrobert #define ILINK1_REGNUM 29
1050*404b540aSrobert #define ILINK2_REGNUM 30
1051*404b540aSrobert #define RETURN_ADDR_REGNUM 31
1052*404b540aSrobert #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1053*404b540aSrobert #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1054*404b540aSrobert 
1055*404b540aSrobert /* Tell prologue and epilogue if register REGNO should be saved / restored.
1056*404b540aSrobert    The return address and frame pointer are treated separately.
1057*404b540aSrobert    Don't consider them here.  */
1058*404b540aSrobert #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1059*404b540aSrobert ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1060*404b540aSrobert  && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1061*404b540aSrobert 
1062*404b540aSrobert #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1063*404b540aSrobert 
1064*404b540aSrobert /* Return the bytes needed to compute the frame pointer from the current
1065*404b540aSrobert    stack pointer.
1066*404b540aSrobert 
1067*404b540aSrobert    SIZE is the size needed for local variables.  */
1068*404b540aSrobert 
1069*404b540aSrobert unsigned int
1070*404b540aSrobert arc_compute_frame_size (int size /* # of var. bytes allocated.  */)
1071*404b540aSrobert {
1072*404b540aSrobert   int regno;
1073*404b540aSrobert   unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1074*404b540aSrobert   unsigned int reg_size, reg_offset;
1075*404b540aSrobert   unsigned int gmask;
1076*404b540aSrobert   enum arc_function_type fn_type;
1077*404b540aSrobert   int interrupt_p;
1078*404b540aSrobert 
1079*404b540aSrobert   var_size	= size;
1080*404b540aSrobert   args_size	= current_function_outgoing_args_size;
1081*404b540aSrobert   pretend_size	= current_function_pretend_args_size;
1082*404b540aSrobert   extra_size	= FIRST_PARM_OFFSET (0);
1083*404b540aSrobert   total_size	= extra_size + pretend_size + args_size + var_size;
1084*404b540aSrobert   reg_offset	= FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1085*404b540aSrobert   reg_size	= 0;
1086*404b540aSrobert   gmask		= 0;
1087*404b540aSrobert 
1088*404b540aSrobert   /* See if this is an interrupt handler.  Call used registers must be saved
1089*404b540aSrobert      for them too.  */
1090*404b540aSrobert   fn_type = arc_compute_function_type (current_function_decl);
1091*404b540aSrobert   interrupt_p = ARC_INTERRUPT_P (fn_type);
1092*404b540aSrobert 
1093*404b540aSrobert   /* Calculate space needed for registers.
1094*404b540aSrobert      ??? We ignore the extension registers for now.  */
1095*404b540aSrobert 
1096*404b540aSrobert   for (regno = 0; regno <= 31; regno++)
1097*404b540aSrobert     {
1098*404b540aSrobert       if (MUST_SAVE_REGISTER (regno, interrupt_p))
1099*404b540aSrobert 	{
1100*404b540aSrobert 	  reg_size += UNITS_PER_WORD;
1101*404b540aSrobert 	  gmask |= 1 << regno;
1102*404b540aSrobert 	}
1103*404b540aSrobert     }
1104*404b540aSrobert 
1105*404b540aSrobert   total_size += reg_size;
1106*404b540aSrobert 
1107*404b540aSrobert   /* If the only space to allocate is the fp/blink save area this is an
1108*404b540aSrobert      empty frame.  However, if we'll be making a function call we need to
1109*404b540aSrobert      allocate a stack frame for our callee's fp/blink save area.  */
1110*404b540aSrobert   if (total_size == extra_size
1111*404b540aSrobert       && !MUST_SAVE_RETURN_ADDR)
1112*404b540aSrobert     total_size = extra_size = 0;
1113*404b540aSrobert 
1114*404b540aSrobert   total_size = ARC_STACK_ALIGN (total_size);
1115*404b540aSrobert 
1116*404b540aSrobert   /* Save computed information.  */
1117*404b540aSrobert   current_frame_info.total_size   = total_size;
1118*404b540aSrobert   current_frame_info.extra_size   = extra_size;
1119*404b540aSrobert   current_frame_info.pretend_size = pretend_size;
1120*404b540aSrobert   current_frame_info.var_size     = var_size;
1121*404b540aSrobert   current_frame_info.args_size    = args_size;
1122*404b540aSrobert   current_frame_info.reg_size	  = reg_size;
1123*404b540aSrobert   current_frame_info.reg_offset	  = reg_offset;
1124*404b540aSrobert   current_frame_info.gmask	  = gmask;
1125*404b540aSrobert   current_frame_info.initialized  = reload_completed;
1126*404b540aSrobert 
1127*404b540aSrobert   /* Ok, we're done.  */
1128*404b540aSrobert   return total_size;
1129*404b540aSrobert }
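
/* Rough sketch of what the numbers above describe (before the
   ARC_STACK_ALIGN rounding):

	total_size = extra_size + pretend_size + args_size
		     + var_size + reg_size

   where extra_size is the fp/blink save area (FIRST_PARM_OFFSET bytes),
   and the general register save area starts reg_offset bytes above the
   post-prologue stack pointer, i.e. just past that area plus the
   outgoing-argument block.  The concrete slot assignments are made by
   the prologue/epilogue routines below.  */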
1130*404b540aSrobert 
1131*404b540aSrobert /* Common code to save/restore registers.  */
1132*404b540aSrobert 
1133*404b540aSrobert void
1134*404b540aSrobert arc_save_restore (FILE *file,
1135*404b540aSrobert                   const char *base_reg,
1136*404b540aSrobert                   unsigned int offset,
1137*404b540aSrobert                   unsigned int gmask,
1138*404b540aSrobert                   const char *op)
1139*404b540aSrobert {
1140*404b540aSrobert   int regno;
1141*404b540aSrobert 
1142*404b540aSrobert   if (gmask == 0)
1143*404b540aSrobert     return;
1144*404b540aSrobert 
1145*404b540aSrobert   for (regno = 0; regno <= 31; regno++)
1146*404b540aSrobert     {
1147*404b540aSrobert       if ((gmask & (1L << regno)) != 0)
1148*404b540aSrobert 	{
1149*404b540aSrobert 	  fprintf (file, "\t%s %s,[%s,%d]\n",
1150*404b540aSrobert 		     op, reg_names[regno], base_reg, offset);
1151*404b540aSrobert 	  offset += UNITS_PER_WORD;
1152*404b540aSrobert 	}
1153*404b540aSrobert     }
1154*404b540aSrobert }
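
/* For example (purely illustrative), a GMASK with bits 13 and 14 set,
   BASE_REG "sp", OFFSET 4 and OP "st" would emit something like:

	st r13,[sp,4]
	st r14,[sp,8]

   assuming reg_names[] uses the plain rNN spellings and UNITS_PER_WORD
   is 4 on this target.  */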
1155*404b540aSrobert 
1156*404b540aSrobert /* Target hook to assemble an integer object.  The ARC version needs to
1157*404b540aSrobert    emit a special directive for references to labels and function
1158*404b540aSrobert    symbols.  */
1159*404b540aSrobert 
1160*404b540aSrobert static bool
1161*404b540aSrobert arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1162*404b540aSrobert {
1163*404b540aSrobert   if (size == UNITS_PER_WORD && aligned_p
1164*404b540aSrobert       && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1165*404b540aSrobert 	  || GET_CODE (x) == LABEL_REF))
1166*404b540aSrobert     {
1167*404b540aSrobert       fputs ("\t.word\t%st(", asm_out_file);
1168*404b540aSrobert       output_addr_const (asm_out_file, x);
1169*404b540aSrobert       fputs (")\n", asm_out_file);
1170*404b540aSrobert       return true;
1171*404b540aSrobert     }
1172*404b540aSrobert   return default_assemble_integer (x, size, aligned_p);
1173*404b540aSrobert }
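
/* For instance, a word-sized, aligned reference to a function symbol foo
   (a hypothetical name) comes out as

	.word	%st(foo)

   rather than the plain ".word foo" that default_assemble_integer would
   emit; label references are handled the same way.  */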
1174*404b540aSrobert 
1175*404b540aSrobert /* Set up the stack and frame pointer (if desired) for the function.  */
1176*404b540aSrobert 
1177*404b540aSrobert static void
1178*404b540aSrobert arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
1179*404b540aSrobert {
1180*404b540aSrobert   const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1181*404b540aSrobert   const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1182*404b540aSrobert   unsigned int gmask = current_frame_info.gmask;
1183*404b540aSrobert   enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1184*404b540aSrobert 
1185*404b540aSrobert   /* If this is an interrupt handler, set up our stack frame.
1186*404b540aSrobert      ??? Optimize later.  */
1187*404b540aSrobert   if (ARC_INTERRUPT_P (fn_type))
1188*404b540aSrobert     {
1189*404b540aSrobert       fprintf (file, "\t%s interrupt handler\n",
1190*404b540aSrobert 	       ASM_COMMENT_START);
1191*404b540aSrobert       fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1192*404b540aSrobert     }
1193*404b540aSrobert 
1194*404b540aSrobert   /* This is only for the human reader.  */
1195*404b540aSrobert   fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1196*404b540aSrobert 	   ASM_COMMENT_START, ASM_COMMENT_START,
1197*404b540aSrobert 	   current_frame_info.var_size,
1198*404b540aSrobert 	   current_frame_info.reg_size / 4,
1199*404b540aSrobert 	   current_frame_info.args_size,
1200*404b540aSrobert 	   current_frame_info.extra_size);
1201*404b540aSrobert 
1202*404b540aSrobert   size = ARC_STACK_ALIGN (size);
1203*404b540aSrobert   size = (! current_frame_info.initialized
1204*404b540aSrobert 	   ? arc_compute_frame_size (size)
1205*404b540aSrobert 	   : current_frame_info.total_size);
1206*404b540aSrobert 
1207*404b540aSrobert   /* These cases shouldn't happen.  Catch them now.  */
1208*404b540aSrobert   gcc_assert (size || !gmask);
1209*404b540aSrobert 
1210*404b540aSrobert   /* Allocate space for register arguments if this is a variadic function.  */
1211*404b540aSrobert   if (current_frame_info.pretend_size != 0)
1212*404b540aSrobert     fprintf (file, "\tsub %s,%s,%d\n",
1213*404b540aSrobert 	     sp_str, sp_str, current_frame_info.pretend_size);
1214*404b540aSrobert 
1215*404b540aSrobert   /* The home-grown ABI says the link register is saved first.  */
1216*404b540aSrobert   if (MUST_SAVE_RETURN_ADDR)
1217*404b540aSrobert     fprintf (file, "\tst %s,[%s,%d]\n",
1218*404b540aSrobert 	     reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1219*404b540aSrobert 
1220*404b540aSrobert   /* Set up the previous frame pointer next (if we need to).  */
1221*404b540aSrobert   if (frame_pointer_needed)
1222*404b540aSrobert     {
1223*404b540aSrobert       fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1224*404b540aSrobert       fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1225*404b540aSrobert     }
1226*404b540aSrobert 
1227*404b540aSrobert   /* ??? We don't handle the case where the saved regs are more than 252
1228*404b540aSrobert      bytes away from sp.  This can be handled by decrementing sp once, saving
1229*404b540aSrobert      the regs, and then decrementing it again.  The epilogue doesn't have this
1230*404b540aSrobert      problem as the `ld' insn takes reg+limm values (though it would be more
1231*404b540aSrobert      efficient to avoid reg+limm).  */
1232*404b540aSrobert 
1233*404b540aSrobert   /* Allocate the stack frame.  */
1234*404b540aSrobert   if (size - current_frame_info.pretend_size > 0)
1235*404b540aSrobert     fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1236*404b540aSrobert 	     sp_str, sp_str, size - current_frame_info.pretend_size);
1237*404b540aSrobert 
1238*404b540aSrobert   /* Save any needed call-saved regs (and call-used if this is an
1239*404b540aSrobert      interrupt handler).  */
1240*404b540aSrobert   arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1241*404b540aSrobert 		    /* The zeroing of these two bits is unnecessary,
1242*404b540aSrobert 		       but leave this in for clarity.  */
1243*404b540aSrobert 		    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1244*404b540aSrobert 		    "st");
1245*404b540aSrobert 
1246*404b540aSrobert   fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1247*404b540aSrobert }
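
/* Putting the pieces above together, a plausible prologue for a small,
   non-interrupt, non-variadic function that needs blink, fp and one
   call-saved register (register names and offsets illustrative only,
   assuming UNITS_PER_WORD of 4 and the usual blink/fp/sp spellings)
   would look roughly like:

	st blink,[sp,4]
	st fp,[sp]
	mov fp,sp
	sub sp,sp,16
	st r13,[sp,8]

   followed by the END PROLOGUE comment.  */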
1248*404b540aSrobert 
1249*404b540aSrobert /* Do any necessary cleanup after a function to restore stack, frame,
1250*404b540aSrobert    and regs.  */
1251*404b540aSrobert 
1252*404b540aSrobert static void
1253*404b540aSrobert arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
1254*404b540aSrobert {
1255*404b540aSrobert   rtx epilogue_delay = current_function_epilogue_delay_list;
1256*404b540aSrobert   int noepilogue = FALSE;
1257*404b540aSrobert   enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1258*404b540aSrobert 
1259*404b540aSrobert   /* This is only for the human reader.  */
1260*404b540aSrobert   fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1261*404b540aSrobert 
1262*404b540aSrobert   size = ARC_STACK_ALIGN (size);
1263*404b540aSrobert   size = (!current_frame_info.initialized
1264*404b540aSrobert 	   ? arc_compute_frame_size (size)
1265*404b540aSrobert 	   : current_frame_info.total_size);
1266*404b540aSrobert 
1267*404b540aSrobert   if (size == 0 && epilogue_delay == 0)
1268*404b540aSrobert     {
1269*404b540aSrobert       rtx insn = get_last_insn ();
1270*404b540aSrobert 
1271*404b540aSrobert       /* If the last insn was a BARRIER, we don't have to write any code
1272*404b540aSrobert 	 because a jump (aka return) was put there.  */
1273*404b540aSrobert       if (GET_CODE (insn) == NOTE)
1274*404b540aSrobert 	insn = prev_nonnote_insn (insn);
1275*404b540aSrobert       if (insn && GET_CODE (insn) == BARRIER)
1276*404b540aSrobert 	noepilogue = TRUE;
1277*404b540aSrobert     }
1278*404b540aSrobert 
1279*404b540aSrobert   if (!noepilogue)
1280*404b540aSrobert     {
1281*404b540aSrobert       unsigned int pretend_size = current_frame_info.pretend_size;
1282*404b540aSrobert       unsigned int frame_size = size - pretend_size;
1283*404b540aSrobert       int restored, fp_restored_p;
1284*404b540aSrobert       int can_trust_sp_p = !current_function_calls_alloca;
1285*404b540aSrobert       const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1286*404b540aSrobert       const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1287*404b540aSrobert 
1288*404b540aSrobert       /* ??? There are lots of optimizations that can be done here.
1289*404b540aSrobert 	 EG: Use fp to restore regs if it's closer.
1290*404b540aSrobert 	 Maybe in time we'll do them all.  For now, always restore regs from
1291*404b540aSrobert 	 sp, but don't restore sp if we don't have to.  */
1292*404b540aSrobert 
1293*404b540aSrobert       if (!can_trust_sp_p)
1294*404b540aSrobert 	{
1295*404b540aSrobert 	  gcc_assert (frame_pointer_needed);
1296*404b540aSrobert 	  fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1297*404b540aSrobert 		   sp_str, fp_str, frame_size, ASM_COMMENT_START);
1298*404b540aSrobert 	}
1299*404b540aSrobert 
1300*404b540aSrobert       /* Restore any saved registers.  */
1301*404b540aSrobert       arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1302*404b540aSrobert 			/* The zeroing of these two bits is unnecessary,
1303*404b540aSrobert 			   but leave this in for clarity.  */
1304*404b540aSrobert 			current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1305*404b540aSrobert 			"ld");
1306*404b540aSrobert 
1307*404b540aSrobert       if (MUST_SAVE_RETURN_ADDR)
1308*404b540aSrobert 	fprintf (file, "\tld %s,[%s,%d]\n",
1309*404b540aSrobert 		 reg_names[RETURN_ADDR_REGNUM],
1310*404b540aSrobert 		 frame_pointer_needed ? fp_str : sp_str,
1311*404b540aSrobert 		 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1312*404b540aSrobert 
1313*404b540aSrobert       /* Keep track of how much of the stack pointer we've restored.
1314*404b540aSrobert 	 It makes the following a lot more readable.  */
1315*404b540aSrobert       restored = 0;
1316*404b540aSrobert       fp_restored_p = 0;
1317*404b540aSrobert 
1318*404b540aSrobert       /* We try to emit the epilogue delay slot insn right after the load
1319*404b540aSrobert 	 of the return address register so that it can execute with the
1320*404b540aSrobert 	 stack intact.  Secondly, loads are delayed.  */
1321*404b540aSrobert       /* ??? If stack intactness is important, always emit now.  */
1322*404b540aSrobert       if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1323*404b540aSrobert 	{
1324*404b540aSrobert 	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1325*404b540aSrobert 	  epilogue_delay = NULL_RTX;
1326*404b540aSrobert 	}
1327*404b540aSrobert 
1328*404b540aSrobert       if (frame_pointer_needed)
1329*404b540aSrobert 	{
1330*404b540aSrobert 	  /* Try to restore the frame pointer in the delay slot.  We can't,
1331*404b540aSrobert 	     however, if any of these is true.  */
1332*404b540aSrobert 	  if (epilogue_delay != NULL_RTX
1333*404b540aSrobert 	      || !SMALL_INT (frame_size)
1334*404b540aSrobert 	      || pretend_size
1335*404b540aSrobert 	      || ARC_INTERRUPT_P (fn_type))
1336*404b540aSrobert 	    {
1337*404b540aSrobert 	      /* Note that we restore fp and sp here!  */
1338*404b540aSrobert 	      fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1339*404b540aSrobert 	      restored += frame_size;
1340*404b540aSrobert 	      fp_restored_p = 1;
1341*404b540aSrobert 	    }
1342*404b540aSrobert 	}
1343*404b540aSrobert       else if (!SMALL_INT (size /* frame_size + pretend_size */)
1344*404b540aSrobert 	       || ARC_INTERRUPT_P (fn_type))
1345*404b540aSrobert 	{
1346*404b540aSrobert 	  fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1347*404b540aSrobert 	  restored += frame_size;
1348*404b540aSrobert 	}
1349*404b540aSrobert 
1350*404b540aSrobert       /* These must be done before the return insn because the delay slot
1351*404b540aSrobert 	 does the final stack restore.  */
1352*404b540aSrobert       if (ARC_INTERRUPT_P (fn_type))
1353*404b540aSrobert 	{
1354*404b540aSrobert 	  if (epilogue_delay)
1355*404b540aSrobert 	    {
1356*404b540aSrobert 	      final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1357*404b540aSrobert 	    }
1358*404b540aSrobert 	}
1359*404b540aSrobert 
1360*404b540aSrobert       /* Emit the return instruction.  */
1361*404b540aSrobert       {
1362*404b540aSrobert 	static const int regs[4] = {
1363*404b540aSrobert 	  0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1364*404b540aSrobert 	};
1365*404b540aSrobert 
1366*404b540aSrobert 	/* Update the flags if returning from an interrupt handler.  */
1367*404b540aSrobert 	if (ARC_INTERRUPT_P (fn_type))
1368*404b540aSrobert 	  fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
1369*404b540aSrobert 	else
1370*404b540aSrobert 	  fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1371*404b540aSrobert 	}
1372*404b540aSrobert 
1373*404b540aSrobert       /* If the only register saved is the return address, we need a
1374*404b540aSrobert 	 nop, unless we have an instruction to put into it.  Otherwise
1375*404b540aSrobert 	 we don't since reloading multiple registers doesn't reference
1376*404b540aSrobert 	 the register being loaded.  */
1377*404b540aSrobert 
1378*404b540aSrobert       if (ARC_INTERRUPT_P (fn_type))
1379*404b540aSrobert 	fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1380*404b540aSrobert       else if (epilogue_delay != NULL_RTX)
1381*404b540aSrobert 	{
1382*404b540aSrobert 	  gcc_assert (!frame_pointer_needed || fp_restored_p);
1383*404b540aSrobert 	  gcc_assert (restored >= size);
1384*404b540aSrobert 	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
1385*404b540aSrobert 	}
1386*404b540aSrobert       else if (frame_pointer_needed && !fp_restored_p)
1387*404b540aSrobert 	{
1388*404b540aSrobert 	  gcc_assert (SMALL_INT (frame_size));
1389*404b540aSrobert 	  /* Note that we restore fp and sp here!  */
1390*404b540aSrobert 	  fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1391*404b540aSrobert 	}
1392*404b540aSrobert       else if (restored < size)
1393*404b540aSrobert 	{
1394*404b540aSrobert 	  gcc_assert (SMALL_INT (size - restored));
1395*404b540aSrobert 	  fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
1396*404b540aSrobert 		   sp_str, sp_str, size - restored);
1397*404b540aSrobert 	}
1398*404b540aSrobert       else
1399*404b540aSrobert 	fprintf (file, "\tnop\n");
1400*404b540aSrobert     }
1401*404b540aSrobert 
1402*404b540aSrobert   /* Reset state info for each function.  */
1403*404b540aSrobert   current_frame_info = zero_frame_info;
1404*404b540aSrobert   arc_compute_function_type (NULL_TREE);
1405*404b540aSrobert }
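
/* The matching epilogue for the prologue sketch above (again, names and
   offsets illustrative only) reloads the call-saved register and blink,
   then returns with the final stack/frame restore in the delay slot:

	ld r13,[sp,8]
	ld blink,[fp,4]
	j.d blink
	ld.a fp,[sp,16]

   where the ld.a writeback restores fp and pops the frame in one go.  */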
1406*404b540aSrobert 
1407*404b540aSrobert /* Define the number of delay slots needed for the function epilogue.
1408*404b540aSrobert 
1409*404b540aSrobert    Interrupt handlers can't have any epilogue delay slots (it's always needed
1410*404b540aSrobert    for something else, I think).  For normal functions, we have to worry about
1411*404b540aSrobert    using call-saved regs as they'll be restored before the delay slot insn.
1412*404b540aSrobert    Functions with non-empty frames already have enough choices for the epilogue
1413*404b540aSrobert    delay slot so for now we only consider functions with empty frames.  */
1414*404b540aSrobert 
1415*404b540aSrobert int
1416*404b540aSrobert arc_delay_slots_for_epilogue (void)
1417*404b540aSrobert {
1418*404b540aSrobert   if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1419*404b540aSrobert     return 0;
1420*404b540aSrobert   if (!current_frame_info.initialized)
1421*404b540aSrobert     (void) arc_compute_frame_size (get_frame_size ());
1422*404b540aSrobert   if (current_frame_info.total_size == 0)
1423*404b540aSrobert     return 1;
1424*404b540aSrobert   return 0;
1425*404b540aSrobert }
1426*404b540aSrobert 
1427*404b540aSrobert /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1428*404b540aSrobert    Any single length instruction which doesn't reference the stack or frame
1429*404b540aSrobert    pointer or any call-saved register is OK.  SLOT will always be 0.  */
1430*404b540aSrobert 
1431*404b540aSrobert int
1432*404b540aSrobert arc_eligible_for_epilogue_delay (rtx trial, int slot)
1433*404b540aSrobert {
1434*404b540aSrobert   gcc_assert (!slot);
1435*404b540aSrobert 
1436*404b540aSrobert   if (get_attr_length (trial) == 1
1437*404b540aSrobert       /* If registers were saved, presumably there are more than enough
1438*404b540aSrobert 	 possibilities for the delay slot.  The alternative is something
1439*404b540aSrobert 	 more complicated (of course, if we expanded the epilogue as rtl
1440*404b540aSrobert 	 this problem would go away).  */
1441*404b540aSrobert       /* ??? Note that this will always be true since only functions with
1442*404b540aSrobert 	 empty frames have epilogue delay slots.  See
1443*404b540aSrobert 	 arc_delay_slots_for_epilogue.  */
1444*404b540aSrobert       && current_frame_info.gmask == 0
1445*404b540aSrobert       && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1446*404b540aSrobert       && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1447*404b540aSrobert     return 1;
1448*404b540aSrobert   return 0;
1449*404b540aSrobert }
1450*404b540aSrobert 
1451*404b540aSrobert /* Return true if OP is a shift operator.  */
1452*404b540aSrobert 
1453*404b540aSrobert int
1454*404b540aSrobert shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1455*404b540aSrobert {
1456*404b540aSrobert   switch (GET_CODE (op))
1457*404b540aSrobert     {
1458*404b540aSrobert     case ASHIFTRT:
1459*404b540aSrobert     case LSHIFTRT:
1460*404b540aSrobert     case ASHIFT:
1461*404b540aSrobert       return 1;
1462*404b540aSrobert     default:
1463*404b540aSrobert       return 0;
1464*404b540aSrobert     }
1465*404b540aSrobert }
1466*404b540aSrobert 
1467*404b540aSrobert /* Output the assembler code for doing a shift.
1468*404b540aSrobert    We go to a bit of trouble to generate efficient code as the ARC only has
1469*404b540aSrobert    single bit shifts.  This is taken from the h8300 port.  We only have one
1470*404b540aSrobert    mode of shifting and can't access individual bytes like the h8300 can, so
1471*404b540aSrobert    this is greatly simplified (at the expense of not generating hyper-
1472*404b540aSrobert    efficient code).
1473*404b540aSrobert 
1474*404b540aSrobert    This function is not used if the variable shift insns are present.  */
1475*404b540aSrobert 
1476*404b540aSrobert /* ??? We assume the output operand is the same as operand 1.
1477*404b540aSrobert    This can be optimized (deleted) in the case of 1 bit shifts.  */
1478*404b540aSrobert /* ??? We use the loop register here.  We don't use it elsewhere (yet) and
1479*404b540aSrobert    using it here will give us a chance to play with it.  */
1480*404b540aSrobert 
1481*404b540aSrobert const char *
1482*404b540aSrobert output_shift (rtx *operands)
1483*404b540aSrobert {
1484*404b540aSrobert   rtx shift = operands[3];
1485*404b540aSrobert   enum machine_mode mode = GET_MODE (shift);
1486*404b540aSrobert   enum rtx_code code = GET_CODE (shift);
1487*404b540aSrobert   const char *shift_one;
1488*404b540aSrobert 
1489*404b540aSrobert   gcc_assert (mode == SImode);
1490*404b540aSrobert 
1491*404b540aSrobert   switch (code)
1492*404b540aSrobert     {
1493*404b540aSrobert     case ASHIFT:   shift_one = "asl %0,%0"; break;
1494*404b540aSrobert     case ASHIFTRT: shift_one = "asr %0,%0"; break;
1495*404b540aSrobert     case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1496*404b540aSrobert     default:       gcc_unreachable ();
1497*404b540aSrobert     }
1498*404b540aSrobert 
1499*404b540aSrobert   if (GET_CODE (operands[2]) != CONST_INT)
1500*404b540aSrobert     {
1501*404b540aSrobert       if (optimize)
1502*404b540aSrobert 	{
1503*404b540aSrobert 	  output_asm_insn ("sub.f 0,%2,0", operands);
1504*404b540aSrobert       	  output_asm_insn ("mov lp_count,%2", operands);
1505*404b540aSrobert 	  output_asm_insn ("bz 2f", operands);
1506*404b540aSrobert 	}
1507*404b540aSrobert       else
1508*404b540aSrobert 	output_asm_insn ("mov %4,%2", operands);
1509*404b540aSrobert       goto shiftloop;
1510*404b540aSrobert     }
1511*404b540aSrobert   else
1512*404b540aSrobert     {
1513*404b540aSrobert       int n = INTVAL (operands[2]);
1514*404b540aSrobert 
1515*404b540aSrobert       /* If the count is negative, make it 0.  */
1516*404b540aSrobert       if (n < 0)
1517*404b540aSrobert 	n = 0;
1518*404b540aSrobert       /* If the count is too big, truncate it.
1519*404b540aSrobert          ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1520*404b540aSrobert 	 do the intuitive thing.  */
1521*404b540aSrobert       else if (n > GET_MODE_BITSIZE (mode))
1522*404b540aSrobert 	n = GET_MODE_BITSIZE (mode);
1523*404b540aSrobert 
1524*404b540aSrobert       /* First see if we can do them inline.  */
1525*404b540aSrobert       if (n <= 8)
1526*404b540aSrobert 	{
1527*404b540aSrobert 	  while (--n >= 0)
1528*404b540aSrobert 	    output_asm_insn (shift_one, operands);
1529*404b540aSrobert 	}
1530*404b540aSrobert       /* See if we can use a rotate/and.  */
1531*404b540aSrobert       else if (n == BITS_PER_WORD - 1)
1532*404b540aSrobert 	{
1533*404b540aSrobert 	  switch (code)
1534*404b540aSrobert 	    {
1535*404b540aSrobert 	    case ASHIFT :
1536*404b540aSrobert 	      output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1537*404b540aSrobert 	      break;
1538*404b540aSrobert 	    case ASHIFTRT :
1539*404b540aSrobert 	      /* The ARC doesn't have a rol insn.  Use something else.  */
1540*404b540aSrobert 	      output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1541*404b540aSrobert 	      break;
1542*404b540aSrobert 	    case LSHIFTRT :
1543*404b540aSrobert 	      /* The ARC doesn't have a rol insn.  Use something else.  */
1544*404b540aSrobert 	      output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1545*404b540aSrobert 	      break;
1546*404b540aSrobert 	    default:
1547*404b540aSrobert 	      break;
1548*404b540aSrobert 	    }
1549*404b540aSrobert 	}
1550*404b540aSrobert       /* Must loop.  */
1551*404b540aSrobert       else
1552*404b540aSrobert 	{
1553*404b540aSrobert 	  char buf[100];
1554*404b540aSrobert 
1555*404b540aSrobert 	  if (optimize)
1556*404b540aSrobert 	    output_asm_insn ("mov lp_count,%c2", operands);
1557*404b540aSrobert 	  else
1558*404b540aSrobert 	    output_asm_insn ("mov %4,%c2", operands);
1559*404b540aSrobert 	shiftloop:
1560*404b540aSrobert 	  if (optimize)
1561*404b540aSrobert 	    {
1562*404b540aSrobert 	      if (flag_pic)
1563*404b540aSrobert 		sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1564*404b540aSrobert 			 ASM_COMMENT_START);
1565*404b540aSrobert 	      else
1566*404b540aSrobert 		sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1567*404b540aSrobert 			 ASM_COMMENT_START);
1568*404b540aSrobert 	      output_asm_insn (buf, operands);
1569*404b540aSrobert 	      output_asm_insn ("sr %4,[lp_start]", operands);
1570*404b540aSrobert 	      output_asm_insn ("add %4,%4,1", operands);
1571*404b540aSrobert 	      output_asm_insn ("sr %4,[lp_end]", operands);
1572*404b540aSrobert 	      output_asm_insn ("nop\n\tnop", operands);
1573*404b540aSrobert 	      if (flag_pic)
1574*404b540aSrobert 		fprintf (asm_out_file, "\t%s single insn loop\n",
1575*404b540aSrobert 			 ASM_COMMENT_START);
1576*404b540aSrobert 	      else
1577*404b540aSrobert 		fprintf (asm_out_file, "1:\t%s single insn loop\n",
1578*404b540aSrobert 			 ASM_COMMENT_START);
1579*404b540aSrobert 	      output_asm_insn (shift_one, operands);
1580*404b540aSrobert 	      fprintf (asm_out_file, "2:\t%s end single insn loop\n",
1581*404b540aSrobert 		       ASM_COMMENT_START);
1582*404b540aSrobert 	    }
1583*404b540aSrobert 	  else
1584*404b540aSrobert 	    {
1585*404b540aSrobert 	      fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1586*404b540aSrobert 		       ASM_COMMENT_START);
1587*404b540aSrobert 	      output_asm_insn ("sub.f %4,%4,1", operands);
1588*404b540aSrobert 	      output_asm_insn ("nop", operands);
1589*404b540aSrobert 	      output_asm_insn ("bn.nd 2f", operands);
1590*404b540aSrobert 	      output_asm_insn (shift_one, operands);
1591*404b540aSrobert 	      output_asm_insn ("b.nd 1b", operands);
1592*404b540aSrobert 	      fprintf (asm_out_file, "2:\t%s end shift loop\n",
1593*404b540aSrobert 		       ASM_COMMENT_START);
1594*404b540aSrobert 	    }
1595*404b540aSrobert 	}
1596*404b540aSrobert     }
1597*404b540aSrobert 
1598*404b540aSrobert   return "";
1599*404b540aSrobert }
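
/* Two illustrative expansions of the above (operand names hypothetical):
   a constant shift (ashift:SI r0 3) falls into the inline case and emits

	asl r0,r0
	asl r0,r0
	asl r0,r0

   while (lshiftrt:SI r0 31) uses the BITS_PER_WORD - 1 trick and emits

	asl.f 0,r0
	adc r0,0,0

   avoiding a 31-iteration loop.  */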
1600*404b540aSrobert 
1601*404b540aSrobert /* Nested function support.  */
1602*404b540aSrobert 
1603*404b540aSrobert /* Emit RTL insns to initialize the variable parts of a trampoline.
1604*404b540aSrobert    FNADDR is an RTX for the address of the function's pure code.
1605*404b540aSrobert    CXT is an RTX for the static chain value for the function.  */
1606*404b540aSrobert 
1607*404b540aSrobert void
1608*404b540aSrobert arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1609*404b540aSrobert                            rtx fnaddr ATTRIBUTE_UNUSED,
1610*404b540aSrobert                            rtx cxt ATTRIBUTE_UNUSED)
1611*404b540aSrobert {
1612*404b540aSrobert }
1613*404b540aSrobert 
1614*404b540aSrobert /* Set the cpu type and print out other fancy things,
1615*404b540aSrobert    at the top of the file.  */
1616*404b540aSrobert 
1617*404b540aSrobert static void
1618*404b540aSrobert arc_file_start (void)
1619*404b540aSrobert {
1620*404b540aSrobert   default_file_start ();
1621*404b540aSrobert   fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1622*404b540aSrobert }
1623*404b540aSrobert 
1624*404b540aSrobert /* Print operand X (an rtx) in assembler syntax to file FILE.
1625*404b540aSrobert    CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1626*404b540aSrobert    For `%' followed by punctuation, CODE is the punctuation and X is null.  */
1627*404b540aSrobert 
1628*404b540aSrobert void
1629*404b540aSrobert arc_print_operand (FILE *file, rtx x, int code)
1630*404b540aSrobert {
1631*404b540aSrobert   switch (code)
1632*404b540aSrobert     {
1633*404b540aSrobert     case '#' :
1634*404b540aSrobert       /* Conditional branches.  For now these are equivalent.  */
1635*404b540aSrobert     case '*' :
1636*404b540aSrobert       /* Unconditional branches.  Output the appropriate delay slot suffix.  */
1637*404b540aSrobert       if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1638*404b540aSrobert 	{
1639*404b540aSrobert 	  /* There's nothing in the delay slot.  */
1640*404b540aSrobert 	  fputs (".nd", file);
1641*404b540aSrobert 	}
1642*404b540aSrobert       else
1643*404b540aSrobert 	{
1644*404b540aSrobert 	  rtx jump = XVECEXP (final_sequence, 0, 0);
1645*404b540aSrobert 	  rtx delay = XVECEXP (final_sequence, 0, 1);
1646*404b540aSrobert 	  if (INSN_ANNULLED_BRANCH_P (jump))
1647*404b540aSrobert 	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1648*404b540aSrobert 	  else
1649*404b540aSrobert 	    fputs (".d", file);
1650*404b540aSrobert 	}
1651*404b540aSrobert       return;
1652*404b540aSrobert     case '?' : /* with leading "." */
1653*404b540aSrobert     case '!' : /* without leading "." */
1654*404b540aSrobert       /* This insn can be conditionally executed.  See if the ccfsm machinery
1655*404b540aSrobert 	 says it should be conditionalized.  */
1656*404b540aSrobert       if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1657*404b540aSrobert 	{
1658*404b540aSrobert 	  /* Is this insn in a delay slot?  */
1659*404b540aSrobert 	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1660*404b540aSrobert 	    {
1661*404b540aSrobert 	      rtx insn = XVECEXP (final_sequence, 0, 1);
1662*404b540aSrobert 
1663*404b540aSrobert 	      /* If the insn is annulled and is from the target path, we need
1664*404b540aSrobert 		 to invert the condition test.  */
1665*404b540aSrobert 	      if (INSN_ANNULLED_BRANCH_P (insn))
1666*404b540aSrobert 		{
1667*404b540aSrobert 		  if (INSN_FROM_TARGET_P (insn))
1668*404b540aSrobert 		    fprintf (file, "%s%s",
1669*404b540aSrobert 			     code == '?' ? "." : "",
1670*404b540aSrobert 			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1671*404b540aSrobert 		  else
1672*404b540aSrobert 		    fprintf (file, "%s%s",
1673*404b540aSrobert 			     code == '?' ? "." : "",
1674*404b540aSrobert 			     arc_condition_codes[arc_ccfsm_current_cc]);
1675*404b540aSrobert 		}
1676*404b540aSrobert 	      else
1677*404b540aSrobert 	        {
1678*404b540aSrobert 		  /* This insn is executed for either path, so don't
1679*404b540aSrobert 		     conditionalize it at all.  */
1680*404b540aSrobert 		  ; /* nothing to do */
1681*404b540aSrobert 		}
1682*404b540aSrobert 	    }
1683*404b540aSrobert 	  else
1684*404b540aSrobert 	    {
1685*404b540aSrobert 	      /* This insn isn't in a delay slot.  */
1686*404b540aSrobert 	      fprintf (file, "%s%s",
1687*404b540aSrobert 		       code == '?' ? "." : "",
1688*404b540aSrobert 		       arc_condition_codes[arc_ccfsm_current_cc]);
1689*404b540aSrobert 	    }
1690*404b540aSrobert 	}
1691*404b540aSrobert       return;
1692*404b540aSrobert     case '~' :
1693*404b540aSrobert       /* Output a nop if we're between a set of the condition codes,
1694*404b540aSrobert 	 and a conditional branch.  */
1695*404b540aSrobert       if (last_insn_set_cc_p)
1696*404b540aSrobert 	fputs ("nop\n\t", file);
1697*404b540aSrobert       return;
1698*404b540aSrobert     case 'd' :
1699*404b540aSrobert       fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1700*404b540aSrobert       return;
1701*404b540aSrobert     case 'D' :
1702*404b540aSrobert       fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1703*404b540aSrobert 				 (get_arc_condition_code (x))],
1704*404b540aSrobert 	     file);
1705*404b540aSrobert       return;
1706*404b540aSrobert     case 'R' :
1707*404b540aSrobert       /* Write second word of DImode or DFmode reference,
1708*404b540aSrobert 	 register or memory.  */
1709*404b540aSrobert       if (GET_CODE (x) == REG)
1710*404b540aSrobert 	fputs (reg_names[REGNO (x)+1], file);
1711*404b540aSrobert       else if (GET_CODE (x) == MEM)
1712*404b540aSrobert 	{
1713*404b540aSrobert 	  fputc ('[', file);
1714*404b540aSrobert 	  /* Handle possible auto-increment.  Since it is pre-increment and
1715*404b540aSrobert 	     we have already done it, we can just use an offset of four.  */
1716*404b540aSrobert 	  /* ??? This is taken from rs6000.c I think.  I don't think it is
1717*404b540aSrobert 	     currently necessary, but keep it around.  */
1718*404b540aSrobert 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
1719*404b540aSrobert 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1720*404b540aSrobert 	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1721*404b540aSrobert 	  else
1722*404b540aSrobert 	    output_address (plus_constant (XEXP (x, 0), 4));
1723*404b540aSrobert 	  fputc (']', file);
1724*404b540aSrobert 	}
1725*404b540aSrobert       else
1726*404b540aSrobert 	output_operand_lossage ("invalid operand to %%R code");
1727*404b540aSrobert       return;
1728*404b540aSrobert     case 'S' :
1729*404b540aSrobert       if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1730*404b540aSrobert 	  || GET_CODE (x) == LABEL_REF)
1731*404b540aSrobert 	{
1732*404b540aSrobert 	  fprintf (file, "%%st(");
1733*404b540aSrobert 	  output_addr_const (file, x);
1734*404b540aSrobert 	  fprintf (file, ")");
1735*404b540aSrobert 	  return;
1736*404b540aSrobert 	}
1737*404b540aSrobert       break;
1738*404b540aSrobert     case 'H' :
1739*404b540aSrobert     case 'L' :
1740*404b540aSrobert       if (GET_CODE (x) == REG)
1741*404b540aSrobert 	{
1742*404b540aSrobert 	  /* L = least significant word, H = most significant word */
1743*404b540aSrobert 	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1744*404b540aSrobert 	    fputs (reg_names[REGNO (x)], file);
1745*404b540aSrobert 	  else
1746*404b540aSrobert 	    fputs (reg_names[REGNO (x)+1], file);
1747*404b540aSrobert 	}
1748*404b540aSrobert       else if (GET_CODE (x) == CONST_INT
1749*404b540aSrobert 	       || GET_CODE (x) == CONST_DOUBLE)
1750*404b540aSrobert 	{
1751*404b540aSrobert 	  rtx first, second;
1752*404b540aSrobert 
1753*404b540aSrobert 	  split_double (x, &first, &second);
1754*404b540aSrobert 	  fprintf (file, "0x%08lx",
1755*404b540aSrobert 		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1756*404b540aSrobert 	}
1757*404b540aSrobert       else
1758*404b540aSrobert 	output_operand_lossage ("invalid operand to %%H/%%L code");
1759*404b540aSrobert       return;
1760*404b540aSrobert     case 'A' :
1761*404b540aSrobert       {
1762*404b540aSrobert 	char str[30];
1763*404b540aSrobert 
1764*404b540aSrobert 	gcc_assert (GET_CODE (x) == CONST_DOUBLE
1765*404b540aSrobert 		    && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
1766*404b540aSrobert 
1767*404b540aSrobert 	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1768*404b540aSrobert 	fprintf (file, "%s", str);
1769*404b540aSrobert 	return;
1770*404b540aSrobert       }
1771*404b540aSrobert     case 'U' :
1772*404b540aSrobert       /* Output a load/store with update indicator if appropriate.  */
1773*404b540aSrobert       if (GET_CODE (x) == MEM)
1774*404b540aSrobert 	{
1775*404b540aSrobert 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
1776*404b540aSrobert 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1777*404b540aSrobert 	    fputs (".a", file);
1778*404b540aSrobert 	}
1779*404b540aSrobert       else
1780*404b540aSrobert 	output_operand_lossage ("invalid operand to %%U code");
1781*404b540aSrobert       return;
1782*404b540aSrobert     case 'V' :
1783*404b540aSrobert       /* Output cache bypass indicator for a load/store insn.  Volatile memory
1784*404b540aSrobert 	 refs are defined to use the cache bypass mechanism.  */
1785*404b540aSrobert       if (GET_CODE (x) == MEM)
1786*404b540aSrobert 	{
1787*404b540aSrobert 	  if (MEM_VOLATILE_P (x))
1788*404b540aSrobert 	    fputs (".di", file);
1789*404b540aSrobert 	}
1790*404b540aSrobert       else
1791*404b540aSrobert 	output_operand_lossage ("invalid operand to %%V code");
1792*404b540aSrobert       return;
1793*404b540aSrobert     case 0 :
1794*404b540aSrobert       /* Do nothing special.  */
1795*404b540aSrobert       break;
1796*404b540aSrobert     default :
1797*404b540aSrobert       /* Unknown flag.  */
1798*404b540aSrobert       output_operand_lossage ("invalid operand output code");
1799*404b540aSrobert     }
1800*404b540aSrobert 
1801*404b540aSrobert   switch (GET_CODE (x))
1802*404b540aSrobert     {
1803*404b540aSrobert     case REG :
1804*404b540aSrobert       fputs (reg_names[REGNO (x)], file);
1805*404b540aSrobert       break;
1806*404b540aSrobert     case MEM :
1807*404b540aSrobert       fputc ('[', file);
1808*404b540aSrobert       if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1809*404b540aSrobert 	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1810*404b540aSrobert 				       GET_MODE_SIZE (GET_MODE (x))));
1811*404b540aSrobert       else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1812*404b540aSrobert 	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1813*404b540aSrobert 				       - GET_MODE_SIZE (GET_MODE (x))));
1814*404b540aSrobert       else
1815*404b540aSrobert 	output_address (XEXP (x, 0));
1816*404b540aSrobert       fputc (']', file);
1817*404b540aSrobert       break;
1818*404b540aSrobert     case CONST_DOUBLE :
1819*404b540aSrobert       /* We handle SFmode constants here as output_addr_const doesn't.  */
1820*404b540aSrobert       if (GET_MODE (x) == SFmode)
1821*404b540aSrobert 	{
1822*404b540aSrobert 	  REAL_VALUE_TYPE d;
1823*404b540aSrobert 	  long l;
1824*404b540aSrobert 
1825*404b540aSrobert 	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1826*404b540aSrobert 	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
1827*404b540aSrobert 	  fprintf (file, "0x%08lx", l);
1828*404b540aSrobert 	  break;
1829*404b540aSrobert 	}
1830*404b540aSrobert       /* Fall through.  Let output_addr_const deal with it.  */
1831*404b540aSrobert     default :
1832*404b540aSrobert       output_addr_const (file, x);
1833*404b540aSrobert       break;
1834*404b540aSrobert     }
1835*404b540aSrobert }
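
/* As a rough example of how the codes above are meant to be used in
   arc.md templates (the template text here is hypothetical): something
   like "st%U0%V0 %1,%0" would append ".a" for a pre-modify address and
   ".di" for a volatile memory operand, while %d/%D print a condition
   and its inverse and %L/%H select the least/most significant word of
   a double-word operand.  */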
1836*404b540aSrobert 
1837*404b540aSrobert /* Print a memory address as an operand to reference that memory location.  */
1838*404b540aSrobert 
1839*404b540aSrobert void
1840*404b540aSrobert arc_print_operand_address (FILE *file, rtx addr)
1841*404b540aSrobert {
1842*404b540aSrobert   register rtx base, index = 0;
1843*404b540aSrobert   int offset = 0;
1844*404b540aSrobert 
1845*404b540aSrobert   switch (GET_CODE (addr))
1846*404b540aSrobert     {
1847*404b540aSrobert     case REG :
1848*404b540aSrobert       fputs (reg_names[REGNO (addr)], file);
1849*404b540aSrobert       break;
1850*404b540aSrobert     case SYMBOL_REF :
1851*404b540aSrobert       if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
1852*404b540aSrobert 	{
1853*404b540aSrobert 	  fprintf (file, "%%st(");
1854*404b540aSrobert 	  output_addr_const (file, addr);
1855*404b540aSrobert 	  fprintf (file, ")");
1856*404b540aSrobert 	}
1857*404b540aSrobert       else
1858*404b540aSrobert 	output_addr_const (file, addr);
1859*404b540aSrobert       break;
1860*404b540aSrobert     case PLUS :
1861*404b540aSrobert       if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1862*404b540aSrobert 	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1863*404b540aSrobert       else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1864*404b540aSrobert 	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1865*404b540aSrobert       else
1866*404b540aSrobert 	base = XEXP (addr, 0), index = XEXP (addr, 1);
1867*404b540aSrobert       gcc_assert (GET_CODE (base) == REG);
1868*404b540aSrobert       fputs (reg_names[REGNO (base)], file);
1869*404b540aSrobert       if (index == 0)
1870*404b540aSrobert 	{
1871*404b540aSrobert 	  if (offset != 0)
1872*404b540aSrobert 	    fprintf (file, ",%d", offset);
1873*404b540aSrobert 	}
1874*404b540aSrobert       else
1875*404b540aSrobert 	{
1876*404b540aSrobert 	  switch (GET_CODE (index))
1877*404b540aSrobert 	    {
1878*404b540aSrobert 	    case REG:
1879*404b540aSrobert 	      fprintf (file, ",%s", reg_names[REGNO (index)]);
1880*404b540aSrobert 	      break;
1881*404b540aSrobert 	    case SYMBOL_REF:
1882*404b540aSrobert 	      fputc (',', file), output_addr_const (file, index);
1883*404b540aSrobert 	      break;
1884*404b540aSrobert 	    default:
1885*404b540aSrobert 	      gcc_unreachable ();
1886*404b540aSrobert 	    }
1887*404b540aSrobert 	}
1888*404b540aSrobert       break;
1889*404b540aSrobert     case PRE_INC :
1890*404b540aSrobert     case PRE_DEC :
1891*404b540aSrobert       /* We shouldn't get here as we've lost the mode of the memory object
1892*404b540aSrobert 	 (which says how much to inc/dec by).  */
1893*404b540aSrobert       gcc_unreachable ();
1894*404b540aSrobert       break;
1895*404b540aSrobert     default :
1896*404b540aSrobert       output_addr_const (file, addr);
1897*404b540aSrobert       break;
1898*404b540aSrobert     }
1899*404b540aSrobert }
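
/* For example (register names illustrative), a (plus (reg) (const_int 8))
   address prints as "r14,8" and a reg+reg address as "r14,r15"; the
   surrounding brackets, when needed, are supplied by the MEM case in
   arc_print_operand above.  */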
1900*404b540aSrobert 
1901*404b540aSrobert /* Update compare/branch separation marker.  */
1902*404b540aSrobert 
1903*404b540aSrobert static void
1904*404b540aSrobert record_cc_ref (rtx insn)
1905*404b540aSrobert {
1906*404b540aSrobert   last_insn_set_cc_p = current_insn_set_cc_p;
1907*404b540aSrobert 
1908*404b540aSrobert   switch (get_attr_cond (insn))
1909*404b540aSrobert     {
1910*404b540aSrobert     case COND_SET :
1911*404b540aSrobert     case COND_SET_ZN :
1912*404b540aSrobert     case COND_SET_ZNC :
1913*404b540aSrobert       if (get_attr_length (insn) == 1)
1914*404b540aSrobert 	current_insn_set_cc_p = 1;
1915*404b540aSrobert       else
1916*404b540aSrobert 	current_insn_set_cc_p = 0;
1917*404b540aSrobert       break;
1918*404b540aSrobert     default :
1919*404b540aSrobert       current_insn_set_cc_p = 0;
1920*404b540aSrobert       break;
1921*404b540aSrobert     }
1922*404b540aSrobert }
1923*404b540aSrobert 
1924*404b540aSrobert /* Conditional execution support.
1925*404b540aSrobert 
1926*404b540aSrobert    This is based on the ARM port but for now is much simpler.
1927*404b540aSrobert 
1928*404b540aSrobert    A finite state machine takes care of noticing whether or not instructions
1929*404b540aSrobert    can be conditionally executed, and thus decrease execution time and code
1930*404b540aSrobert    size by deleting branch instructions.  The fsm is controlled by
1931*404b540aSrobert    final_prescan_insn, and controls the actions of PRINT_OPERAND.  The patterns
1932*404b540aSrobert    in the .md file for the branch insns also have a hand in this.  */
1933*404b540aSrobert 
1934*404b540aSrobert /* The states of the fsm controlling condition codes are:
1935*404b540aSrobert    0: normal, do nothing special
1936*404b540aSrobert    1: don't output this insn
1937*404b540aSrobert    2: don't output this insn
1938*404b540aSrobert    3: make insns conditional
1939*404b540aSrobert    4: make insns conditional
1940*404b540aSrobert 
1941*404b540aSrobert    State transitions (state->state by whom, under what condition):
1942*404b540aSrobert    0 -> 1 final_prescan_insn, if insn is conditional branch
1943*404b540aSrobert    0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1944*404b540aSrobert    1 -> 3 branch patterns, after having not output the conditional branch
1945*404b540aSrobert    2 -> 4 branch patterns, after having not output the conditional branch
1946*404b540aSrobert    3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1947*404b540aSrobert           (the target label has CODE_LABEL_NUMBER equal to
1948*404b540aSrobert 	  arc_ccfsm_target_label).
1949*404b540aSrobert    4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1950*404b540aSrobert 
1951*404b540aSrobert    If the jump clobbers the conditions then we use states 2 and 4.
1952*404b540aSrobert 
1953*404b540aSrobert    A similar thing can be done with conditional return insns.
1954*404b540aSrobert 
1955*404b540aSrobert    We also handle separating branches from sets of the condition code.
1956*404b540aSrobert    This is done here because knowledge of the ccfsm state is required,
1957*404b540aSrobert    we may not be outputting the branch.  */
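
/* A minimal sketch of the transformation (assembly purely illustrative):
   a conditional branch around a single insn, e.g.

	beq .L1
	mov r0,r1
   .L1:

   ends up with the branch not output and the skipped insn predicated on
   the inverse condition,

	mov.ne r0,r1

   the .ne suffix being supplied through the %? / %! operand codes in
   arc_print_operand.  */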
1958*404b540aSrobert 
1959*404b540aSrobert void
1960*404b540aSrobert arc_final_prescan_insn (rtx insn,
1961*404b540aSrobert                         rtx *opvec ATTRIBUTE_UNUSED,
1962*404b540aSrobert                         int noperands ATTRIBUTE_UNUSED)
1963*404b540aSrobert {
1964*404b540aSrobert   /* BODY will hold the body of INSN.  */
1965*404b540aSrobert   register rtx body = PATTERN (insn);
1966*404b540aSrobert 
1967*404b540aSrobert   /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1968*404b540aSrobert      an if/then/else), and things need to be reversed.  */
1969*404b540aSrobert   int reverse = 0;
1970*404b540aSrobert 
1971*404b540aSrobert   /* If we start with a return insn, we only succeed if we find another one.  */
1972*404b540aSrobert   int seeking_return = 0;
1973*404b540aSrobert 
1974*404b540aSrobert   /* START_INSN will hold the insn from where we start looking.  This is the
1975*404b540aSrobert      first insn after the following code_label if REVERSE is true.  */
1976*404b540aSrobert   rtx start_insn = insn;
1977*404b540aSrobert 
1978*404b540aSrobert   /* Update compare/branch separation marker.  */
1979*404b540aSrobert   record_cc_ref (insn);
1980*404b540aSrobert 
1981*404b540aSrobert   /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1982*404b540aSrobert      We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1983*404b540aSrobert      final_scan_insn which has `optimize' as a local.  */
1984*404b540aSrobert   if (optimize < 2 || TARGET_NO_COND_EXEC)
1985*404b540aSrobert     return;
1986*404b540aSrobert 
1987*404b540aSrobert   /* If in state 4, check if the target branch is reached, in order to
1988*404b540aSrobert      change back to state 0.  */
1989*404b540aSrobert   if (arc_ccfsm_state == 4)
1990*404b540aSrobert     {
1991*404b540aSrobert       if (insn == arc_ccfsm_target_insn)
1992*404b540aSrobert 	{
1993*404b540aSrobert 	  arc_ccfsm_target_insn = NULL;
1994*404b540aSrobert 	  arc_ccfsm_state = 0;
1995*404b540aSrobert 	}
1996*404b540aSrobert       return;
1997*404b540aSrobert     }
1998*404b540aSrobert 
1999*404b540aSrobert   /* If in state 3, it is possible to repeat the trick, if this insn is an
2000*404b540aSrobert      unconditional branch to a label, and immediately following this branch
2001*404b540aSrobert      is the previous target label which is only used once, and the label this
2002*404b540aSrobert      branch jumps to is not too far off.  Or in other words "we've done the
2003*404b540aSrobert      `then' part, see if we can do the `else' part."  */
2004*404b540aSrobert   if (arc_ccfsm_state == 3)
2005*404b540aSrobert     {
2006*404b540aSrobert       if (simplejump_p (insn))
2007*404b540aSrobert 	{
2008*404b540aSrobert 	  start_insn = next_nonnote_insn (start_insn);
2009*404b540aSrobert 	  if (GET_CODE (start_insn) == BARRIER)
2010*404b540aSrobert 	    {
2011*404b540aSrobert 	      /* ??? Isn't this always a barrier?  */
2012*404b540aSrobert 	      start_insn = next_nonnote_insn (start_insn);
2013*404b540aSrobert 	    }
2014*404b540aSrobert 	  if (GET_CODE (start_insn) == CODE_LABEL
2015*404b540aSrobert 	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2016*404b540aSrobert 	      && LABEL_NUSES (start_insn) == 1)
2017*404b540aSrobert 	    reverse = TRUE;
2018*404b540aSrobert 	  else
2019*404b540aSrobert 	    return;
2020*404b540aSrobert 	}
2021*404b540aSrobert       else if (GET_CODE (body) == RETURN)
2022*404b540aSrobert         {
2023*404b540aSrobert 	  start_insn = next_nonnote_insn (start_insn);
2024*404b540aSrobert 	  if (GET_CODE (start_insn) == BARRIER)
2025*404b540aSrobert 	    start_insn = next_nonnote_insn (start_insn);
2026*404b540aSrobert 	  if (GET_CODE (start_insn) == CODE_LABEL
2027*404b540aSrobert 	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2028*404b540aSrobert 	      && LABEL_NUSES (start_insn) == 1)
2029*404b540aSrobert 	    {
2030*404b540aSrobert 	      reverse = TRUE;
2031*404b540aSrobert 	      seeking_return = 1;
2032*404b540aSrobert 	    }
2033*404b540aSrobert 	  else
2034*404b540aSrobert 	    return;
2035*404b540aSrobert         }
2036*404b540aSrobert       else
2037*404b540aSrobert 	return;
2038*404b540aSrobert     }
2039*404b540aSrobert 
2040*404b540aSrobert   if (GET_CODE (insn) != JUMP_INSN)
2041*404b540aSrobert     return;
2042*404b540aSrobert 
2043*404b540aSrobert   /* This jump might be paralleled with a clobber of the condition codes;
2044*404b540aSrobert      the jump should always come first.  */
2045*404b540aSrobert   if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2046*404b540aSrobert     body = XVECEXP (body, 0, 0);
2047*404b540aSrobert 
2048*404b540aSrobert   if (reverse
2049*404b540aSrobert       || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2050*404b540aSrobert 	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2051*404b540aSrobert     {
2052*404b540aSrobert       int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2053*404b540aSrobert       /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
2054*404b540aSrobert       int then_not_else = TRUE;
2055*404b540aSrobert       /* Nonzero if next insn must be the target label.  */
2056*404b540aSrobert       int next_must_be_target_label_p;
2057*404b540aSrobert       rtx this_insn = start_insn, label = 0;
2058*404b540aSrobert 
2059*404b540aSrobert       /* Register the insn jumped to.  */
2060*404b540aSrobert       if (reverse)
2061*404b540aSrobert         {
2062*404b540aSrobert 	  if (!seeking_return)
2063*404b540aSrobert 	    label = XEXP (SET_SRC (body), 0);
2064*404b540aSrobert         }
2065*404b540aSrobert       else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2066*404b540aSrobert 	label = XEXP (XEXP (SET_SRC (body), 1), 0);
2067*404b540aSrobert       else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2068*404b540aSrobert 	{
2069*404b540aSrobert 	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
2070*404b540aSrobert 	  then_not_else = FALSE;
2071*404b540aSrobert 	}
2072*404b540aSrobert       else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2073*404b540aSrobert 	seeking_return = 1;
2074*404b540aSrobert       else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2075*404b540aSrobert         {
2076*404b540aSrobert 	  seeking_return = 1;
2077*404b540aSrobert 	  then_not_else = FALSE;
2078*404b540aSrobert         }
2079*404b540aSrobert       else
2080*404b540aSrobert 	gcc_unreachable ();
2081*404b540aSrobert 
2082*404b540aSrobert       /* See how many insns this branch skips, and what kind of insns.  If all
2083*404b540aSrobert 	 insns are okay, and the label or unconditional branch to the same
2084*404b540aSrobert 	 label is not too far away, succeed.  */
2085*404b540aSrobert       for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2086*404b540aSrobert 	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2087*404b540aSrobert 	   insns_skipped++)
2088*404b540aSrobert 	{
2089*404b540aSrobert 	  rtx scanbody;
2090*404b540aSrobert 
2091*404b540aSrobert 	  this_insn = next_nonnote_insn (this_insn);
2092*404b540aSrobert 	  if (!this_insn)
2093*404b540aSrobert 	    break;
2094*404b540aSrobert 
2095*404b540aSrobert 	  if (next_must_be_target_label_p)
2096*404b540aSrobert 	    {
2097*404b540aSrobert 	      if (GET_CODE (this_insn) == BARRIER)
2098*404b540aSrobert 		continue;
2099*404b540aSrobert 	      if (GET_CODE (this_insn) == CODE_LABEL
2100*404b540aSrobert 		  && this_insn == label)
2101*404b540aSrobert 		{
2102*404b540aSrobert 		  arc_ccfsm_state = 1;
2103*404b540aSrobert 		  succeed = TRUE;
2104*404b540aSrobert 		}
2105*404b540aSrobert 	      else
2106*404b540aSrobert 		fail = TRUE;
2107*404b540aSrobert 	      break;
2108*404b540aSrobert 	    }
2109*404b540aSrobert 
2110*404b540aSrobert 	  scanbody = PATTERN (this_insn);
2111*404b540aSrobert 
2112*404b540aSrobert 	  switch (GET_CODE (this_insn))
2113*404b540aSrobert 	    {
2114*404b540aSrobert 	    case CODE_LABEL:
2115*404b540aSrobert 	      /* Succeed if it is the target label, otherwise fail since
2116*404b540aSrobert 		 control falls in from somewhere else.  */
2117*404b540aSrobert 	      if (this_insn == label)
2118*404b540aSrobert 		{
2119*404b540aSrobert 		  arc_ccfsm_state = 1;
2120*404b540aSrobert 		  succeed = TRUE;
2121*404b540aSrobert 		}
2122*404b540aSrobert 	      else
2123*404b540aSrobert 		fail = TRUE;
2124*404b540aSrobert 	      break;
2125*404b540aSrobert 
2126*404b540aSrobert 	    case BARRIER:
2127*404b540aSrobert 	      /* Succeed if the following insn is the target label.
2128*404b540aSrobert 		 Otherwise fail.
2129*404b540aSrobert 		 If return insns are used then the last insn in a function
2130*404b540aSrobert 		 will be a barrier.  */
2131*404b540aSrobert 	      next_must_be_target_label_p = TRUE;
2132*404b540aSrobert 	      break;
2133*404b540aSrobert 
2134*404b540aSrobert 	    case CALL_INSN:
2135*404b540aSrobert 	      /* Can handle a call insn if there are no insns after it.
2136*404b540aSrobert 		 IE: The next "insn" is the target label.  We don't have to
2137*404b540aSrobert 		 worry about delay slots as such insns are SEQUENCE's inside
2138*404b540aSrobert 		 INSN's.  ??? It is possible to handle such insns though.  */
2139*404b540aSrobert 	      if (get_attr_cond (this_insn) == COND_CANUSE)
2140*404b540aSrobert 		next_must_be_target_label_p = TRUE;
2141*404b540aSrobert 	      else
2142*404b540aSrobert 		fail = TRUE;
2143*404b540aSrobert 	      break;
2144*404b540aSrobert 
2145*404b540aSrobert 	    case JUMP_INSN:
2146*404b540aSrobert       	      /* If this is an unconditional branch to the same label, succeed.
2147*404b540aSrobert 		 If it is to another label, do nothing.  If it is conditional,
2148*404b540aSrobert 		 fail.  */
2149*404b540aSrobert 	      /* ??? Probably, the test for the SET and the PC are unnecessary.  */
2150*404b540aSrobert 
2151*404b540aSrobert 	      if (GET_CODE (scanbody) == SET
2152*404b540aSrobert 		  && GET_CODE (SET_DEST (scanbody)) == PC)
2153*404b540aSrobert 		{
2154*404b540aSrobert 		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2155*404b540aSrobert 		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2156*404b540aSrobert 		    {
2157*404b540aSrobert 		      arc_ccfsm_state = 2;
2158*404b540aSrobert 		      succeed = TRUE;
2159*404b540aSrobert 		    }
2160*404b540aSrobert 		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2161*404b540aSrobert 		    fail = TRUE;
2162*404b540aSrobert 		}
2163*404b540aSrobert 	      else if (GET_CODE (scanbody) == RETURN
2164*404b540aSrobert 		       && seeking_return)
2165*404b540aSrobert 	        {
2166*404b540aSrobert 		  arc_ccfsm_state = 2;
2167*404b540aSrobert 		  succeed = TRUE;
2168*404b540aSrobert 	        }
2169*404b540aSrobert 	      else if (GET_CODE (scanbody) == PARALLEL)
2170*404b540aSrobert 	        {
2171*404b540aSrobert 		  if (get_attr_cond (this_insn) != COND_CANUSE)
2172*404b540aSrobert 		    fail = TRUE;
2173*404b540aSrobert 		}
2174*404b540aSrobert 	      break;
2175*404b540aSrobert 
2176*404b540aSrobert 	    case INSN:
2177*404b540aSrobert 	      /* We can only do this with insns that can use the condition
2178*404b540aSrobert 		 codes (and don't set them).  */
2179*404b540aSrobert 	      if (GET_CODE (scanbody) == SET
2180*404b540aSrobert 		  || GET_CODE (scanbody) == PARALLEL)
2181*404b540aSrobert 		{
2182*404b540aSrobert 		  if (get_attr_cond (this_insn) != COND_CANUSE)
2183*404b540aSrobert 		    fail = TRUE;
2184*404b540aSrobert 		}
2185*404b540aSrobert 	      /* We can't handle other insns like sequences.  */
2186*404b540aSrobert 	      else
2187*404b540aSrobert 		fail = TRUE;
2188*404b540aSrobert 	      break;
2189*404b540aSrobert 
2190*404b540aSrobert 	    default:
2191*404b540aSrobert 	      break;
2192*404b540aSrobert 	    }
2193*404b540aSrobert 	}
2194*404b540aSrobert 
2195*404b540aSrobert       if (succeed)
2196*404b540aSrobert 	{
2197*404b540aSrobert 	  if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2198*404b540aSrobert 	    arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2199*404b540aSrobert 	  else
2200*404b540aSrobert 	    {
2201*404b540aSrobert 	      gcc_assert (seeking_return || arc_ccfsm_state == 2);
2202*404b540aSrobert 	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2203*404b540aSrobert 	        {
2204*404b540aSrobert 		  this_insn = next_nonnote_insn (this_insn);
2205*404b540aSrobert 		  gcc_assert (!this_insn
2206*404b540aSrobert 			      || (GET_CODE (this_insn) != BARRIER
2207*404b540aSrobert 				  && GET_CODE (this_insn) != CODE_LABEL));
2208*404b540aSrobert 	        }
2209*404b540aSrobert 	      if (!this_insn)
2210*404b540aSrobert 	        {
2211*404b540aSrobert 		  /* Oh dear!  We ran off the end; give up.  */
2212*404b540aSrobert 		  extract_insn_cached (insn);
2213*404b540aSrobert 		  arc_ccfsm_state = 0;
2214*404b540aSrobert 		  arc_ccfsm_target_insn = NULL;
2215*404b540aSrobert 		  return;
2216*404b540aSrobert 	        }
2217*404b540aSrobert 	      arc_ccfsm_target_insn = this_insn;
2218*404b540aSrobert 	    }
2219*404b540aSrobert 
2220*404b540aSrobert 	  /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
2221*404b540aSrobert 	     what it was.  */
2222*404b540aSrobert 	  if (!reverse)
2223*404b540aSrobert 	    arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2224*404b540aSrobert 								 0));
2225*404b540aSrobert 
2226*404b540aSrobert 	  if (reverse || then_not_else)
2227*404b540aSrobert 	    arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2228*404b540aSrobert 	}
2229*404b540aSrobert 
2230*404b540aSrobert       /* Restore recog_data.  Getting the attributes of other insns can
2231*404b540aSrobert 	 destroy this array, but final.c assumes that it remains intact
2232*404b540aSrobert 	 across this call.  */
2233*404b540aSrobert       extract_insn_cached (insn);
2234*404b540aSrobert     }
2235*404b540aSrobert }
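
/* A rough sketch of the transformation the state machine above enables;
   the mnemonics and condition-suffix syntax are illustrative assumptions,
   not taken from this port's output templates:

	cmp	r0,0			cmp	r0,0
	bne	.L1		==>	mov.eq	r1,1
	mov	r1,1
   .L1:

   The conditional branch is not emitted, and the insn it would have jumped
   over is output with the inverse condition instead.  */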
2236*404b540aSrobert 
2237*404b540aSrobert /* Record that we are currently outputting label NUM with prefix PREFIX.
2238*404b540aSrobert    If it's the label we're looking for, reset the ccfsm machinery.
2239*404b540aSrobert 
2240*404b540aSrobert    Called from (*targetm.asm_out.internal_label).  */
2241*404b540aSrobert 
2242*404b540aSrobert void
2243*404b540aSrobert arc_ccfsm_at_label (const char *prefix, int num)
2244*404b540aSrobert {
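  /* A hedged note: "L" is presumably the prefix used for ordinary code
     labels (cf. ASM_GENERATE_INTERNAL_LABEL), so labels of other classes
     leave the state machine alone.  */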
2245*404b540aSrobert   if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2246*404b540aSrobert       && !strcmp (prefix, "L"))
2247*404b540aSrobert     {
2248*404b540aSrobert       arc_ccfsm_state = 0;
2249*404b540aSrobert       arc_ccfsm_target_insn = NULL_RTX;
2250*404b540aSrobert     }
2251*404b540aSrobert }
2252*404b540aSrobert 
2253*404b540aSrobert /* See if the current insn, which is a conditional branch, is to be
2254*404b540aSrobert    deleted.  */
2255*404b540aSrobert 
2256*404b540aSrobert int
2257*404b540aSrobert arc_ccfsm_branch_deleted_p (void)
2258*404b540aSrobert {
2259*404b540aSrobert   if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2260*404b540aSrobert     return 1;
2261*404b540aSrobert   return 0;
2262*404b540aSrobert }
2263*404b540aSrobert 
2264*404b540aSrobert /* Record that a branch isn't output because subsequent insns can be
2265*404b540aSrobert    conditionalized.  */
2266*404b540aSrobert 
2267*404b540aSrobert void
2268*404b540aSrobert arc_ccfsm_record_branch_deleted (void)
2269*404b540aSrobert {
2270*404b540aSrobert   /* Indicate we're conditionalizing insns now.  */
2271*404b540aSrobert   arc_ccfsm_state += 2;
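  /* A hedged note: this takes state 1 to 3 and state 2 to 4;
     arc_ccfsm_at_label above resets the machinery when the target label
     is output in state 3, while the state-2/4 case presumably keys off
     arc_ccfsm_target_insn instead.  */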
2272*404b540aSrobert 
2273*404b540aSrobert   /* If the next insn is a subroutine call, we still need a nop between the
2274*404b540aSrobert      cc setter and user.  We need to undo the effect of calling record_cc_ref
2275*404b540aSrobert      for the just deleted branch.  */
2276*404b540aSrobert   current_insn_set_cc_p = last_insn_set_cc_p;
2277*404b540aSrobert }
2278*404b540aSrobert 
2279*404b540aSrobert void
2280*404b540aSrobert arc_va_start (tree valist, rtx nextarg)
2281*404b540aSrobert {
2282*404b540aSrobert   /* See arc_setup_incoming_varargs for reasons for this oddity.  */
2283*404b540aSrobert   if (current_function_args_info < 8
2284*404b540aSrobert       && (current_function_args_info & 1))
2285*404b540aSrobert     nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2286*404b540aSrobert 
2287*404b540aSrobert   std_expand_builtin_va_start (valist, nextarg);
2288*404b540aSrobert }
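
/* A sketch of the adjustment above, under the assumption that
   arc_setup_incoming_varargs (elsewhere in this file) keeps the
   anonymous-register save area 8-byte aligned: for a declaration such as

	int f (int a, ...);

   one argument register word is named, so the args_info count is odd and
   the save area is padded by one word; the extra UNITS_PER_WORD here
   skips that pad word.  */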
2289*404b540aSrobert 
2290*404b540aSrobert /* This is how to output a definition of an internal numbered label where
2291*404b540aSrobert    PREFIX is the class of label and NUM is the number within the class.  */
2292*404b540aSrobert 
2293*404b540aSrobert static void
2294*404b540aSrobert arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
2295*404b540aSrobert {
2296*404b540aSrobert   arc_ccfsm_at_label (prefix, labelno);
2297*404b540aSrobert   default_internal_label (stream, prefix, labelno);
2298*404b540aSrobert }
2299*404b540aSrobert 
2300*404b540aSrobert /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.  */
2301*404b540aSrobert 
2302*404b540aSrobert static void
2303*404b540aSrobert arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
2304*404b540aSrobert {
2305*404b540aSrobert #if 0
2306*404b540aSrobert /* On the ARC we want to have libgcc's for multiple cpus in one binary.
2307*404b540aSrobert    We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
2308*404b540aSrobert    and we'll get another suffix added on if -mmangle-cpu.  */
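/* A hedged note: FILE and SYMREF are not defined in this scope; if this
   block were ever revived it would presumably use asm_out_file and the
   FUN argument (e.g. XSTR (fun, 0)) instead.  */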
2309*404b540aSrobert   if (TARGET_MANGLE_CPU_LIBGCC)
2310*404b540aSrobert     {
2311*404b540aSrobert       fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
2312*404b540aSrobert 	       XSTR (SYMREF, 0), XSTR (SYMREF, 0),
2313*404b540aSrobert 	       arc_mangle_suffix);
2314*404b540aSrobert     }
2315*404b540aSrobert #endif
2316*404b540aSrobert }
2317*404b540aSrobert 
2318*404b540aSrobert /* Worker function for TARGET_RETURN_IN_MEMORY.  */
2319*404b540aSrobert 
2320*404b540aSrobert static bool
2321*404b540aSrobert arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2322*404b540aSrobert {
2323*404b540aSrobert   if (AGGREGATE_TYPE_P (type))
2324*404b540aSrobert     return true;
2325*404b540aSrobert   else
2326*404b540aSrobert     {
2327*404b540aSrobert       HOST_WIDE_INT size = int_size_in_bytes (type);
2328*404b540aSrobert       return (size == -1 || size > 8);
2329*404b540aSrobert     }
2330*404b540aSrobert }
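
/* Some illustrative consequences of the predicate above (the type sizes
   assume the usual 32-bit ARC ABI):

     struct { int x, y; }   -- aggregate, so in memory even though 8 bytes
     long long              -- 8 bytes and not an aggregate, so in registers
     variable-sized type    -- int_size_in_bytes () returns -1, so in memory  */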
2331*404b540aSrobert 
2332*404b540aSrobert /* For ARC, all aggregates and arguments larger than 8 bytes are
2333*404b540aSrobert    passed by reference.  */
2334*404b540aSrobert 
2335*404b540aSrobert static bool
2336*404b540aSrobert arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2337*404b540aSrobert 		       enum machine_mode mode, tree type,
2338*404b540aSrobert 		       bool named ATTRIBUTE_UNUSED)
2339*404b540aSrobert {
2340*404b540aSrobert   unsigned HOST_WIDE_INT size;
2341*404b540aSrobert 
2342*404b540aSrobert   if (type)
2343*404b540aSrobert     {
2344*404b540aSrobert       if (AGGREGATE_TYPE_P (type))
2345*404b540aSrobert 	return true;
2346*404b540aSrobert       size = int_size_in_bytes (type);
2347*404b540aSrobert     }
2348*404b540aSrobert   else
2349*404b540aSrobert     size = GET_MODE_SIZE (mode);
2350*404b540aSrobert 
2351*404b540aSrobert   return size > 8;
2352*404b540aSrobert }
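
/* Illustrative cases for the hook above: any struct or union argument goes
   by reference; an int or a double (at most 8 bytes) goes by value; and
   when no type is available (libcalls, for instance) the decision falls
   back to the size of MODE.  */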
2353