xref: /openbsd/gnu/usr.bin/gcc/gcc/config/c4x/c4x.c (revision c87b03e5)
1 /* Subroutines for assembler code output on the TMS320C[34]x
2    Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
3    Free Software Foundation, Inc.
4 
5    Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6               and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
7 
8 This file is part of GNU CC.
9 
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
14 
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18 GNU General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING.  If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA.  */
24 
25 /* Some output-actions in c4x.md need these.  */
26 #include "config.h"
27 #include "system.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "flags.h"
43 #include "loop.h"
44 #include "recog.h"
45 #include "c-tree.h"
46 #include "ggc.h"
47 #include "cpplib.h"
48 #include "toplev.h"
49 #include "tm_p.h"
50 #include "target.h"
51 #include "target-def.h"
52 
/* SYMBOL_REF handles for out-of-line arithmetic/conversion helpers
   (multiply, fix/float conversions) that the hardware cannot do
   inline.  NOTE(review): assumed to be initialized during target
   setup elsewhere in this file — the initialization is not visible
   in this chunk; confirm.  */
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

/* Nonzero if the current function may be treated as a leaf function.
   Set in c4x_expand_prologue for interrupt functions and consulted by
   c4x_isr_reg_used_p when deciding which registers to save.  */
static int c4x_leaf_function;

/* Printable names for the floating-point view of the registers.  */
static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
67 
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  The "Saved" column
   records whether the register is preserved across calls and, if so,
   in which mode it must be saved.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,			/* R0           QI, QF, HF      No.  */
  R0R1_REGS,			/* R1           QI, QF, HF      No.  */
  R2R3_REGS,			/* R2           QI, QF, HF      No.  */
  R2R3_REGS,			/* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,			/* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,			/* R7           QI, QF, HF      QF.  */
  ADDR_REGS,			/* AR0          QI              No.  */
  ADDR_REGS,			/* AR1          QI              No.  */
  ADDR_REGS,			/* AR2          QI              No.  */
  ADDR_REGS,			/* AR3          QI              QI.  */
  ADDR_REGS,			/* AR4          QI              QI.  */
  ADDR_REGS,			/* AR5          QI              QI.  */
  ADDR_REGS,			/* AR6          QI              QI.  */
  ADDR_REGS,			/* AR7          QI              QI.  */
  DP_REG,			/* DP           QI              No.  */
  INDEX_REGS,			/* IR0          QI              No.  */
  INDEX_REGS,			/* IR1          QI              No.  */
  BK_REG,			/* BK           QI              QI.  */
  SP_REG,			/* SP           QI              No.  */
  ST_REG,			/* ST           CC              No.  */
  NO_REGS,			/* DIE/IE                       No.  */
  NO_REGS,			/* IIE/IF                       No.  */
  NO_REGS,			/* IIF/IOF                      No.  */
  INT_REGS,			/* RS           QI              No.  */
  INT_REGS,			/* RE           QI              No.  */
  RC_REG,			/* RC           QI              No.  */
  EXT_REGS,			/* R8           QI, QF, HF      QI.  */
  EXT_REGS,			/* R9           QI, QF, HF      No.  */
  EXT_REGS,			/* R10          QI, QF, HF      No.  */
  EXT_REGS,			/* R11          QI, QF, HF      No.  */
};
109 
/* For each hard register, the widest mode a caller must use when
   saving it around a call (VOIDmode for registers that are never
   caller-saved this way).  Parallels c4x_regclass_map above.  */

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,			/* R0           QI, QF, HF      No.  */
  HFmode,			/* R1           QI, QF, HF      No.  */
  HFmode,			/* R2           QI, QF, HF      No.  */
  HFmode,			/* R3           QI, QF, HF      No.  */
  QFmode,			/* R4           QI, QF, HF      QI.  */
  QFmode,			/* R5           QI, QF, HF      QI.  */
  QImode,			/* R6           QI, QF, HF      QF.  */
  QImode,			/* R7           QI, QF, HF      QF.  */
  QImode,			/* AR0          QI              No.  */
  QImode,			/* AR1          QI              No.  */
  QImode,			/* AR2          QI              No.  */
  QImode,			/* AR3          QI              QI.  */
  QImode,			/* AR4          QI              QI.  */
  QImode,			/* AR5          QI              QI.  */
  QImode,			/* AR6          QI              QI.  */
  QImode,			/* AR7          QI              QI.  */
  VOIDmode,			/* DP           QI              No.  */
  QImode,			/* IR0          QI              No.  */
  QImode,			/* IR1          QI              No.  */
  QImode,			/* BK           QI              QI.  */
  VOIDmode,			/* SP           QI              No.  */
  VOIDmode,			/* ST           CC              No.  */
  VOIDmode,			/* DIE/IE                       No.  */
  VOIDmode,			/* IIE/IF                       No.  */
  VOIDmode,			/* IIF/IOF                      No.  */
  QImode,			/* RS           QI              No.  */
  QImode,			/* RE           QI              No.  */
  VOIDmode,			/* RC           QI              No.  */
  QFmode,			/* R8           QI, QF, HF      QI.  */
  HFmode,			/* R9           QI, QF, HF      No.  */
  HFmode,			/* R10          QI, QF, HF      No.  */
  HFmode,			/* R11          QI, QF, HF      No.  */
};
146 
147 
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

/* Command-line option state: the raw strings are filled in by the
   option machinery and decoded in c4x_override_options.  */
const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;	/* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;	/* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
166 
/* Forward declarations of local helpers.  PARAMS () is gcc's
   compatibility macro wrapping ISO prototype argument lists for
   traditional compilers.  */
static int c4x_isr_reg_used_p PARAMS ((unsigned int));
static int c4x_leaf_function_p PARAMS ((void));
static int c4x_assembler_function_p PARAMS ((void));
static int c4x_immed_float_p PARAMS ((rtx));
static int c4x_a_register PARAMS ((rtx));
static int c4x_x_register PARAMS ((rtx));
static int c4x_immed_int_constant PARAMS ((rtx));
static int c4x_immed_float_constant PARAMS ((rtx));
static int c4x_K_constant PARAMS ((rtx));
static int c4x_N_constant PARAMS ((rtx));
static int c4x_O_constant PARAMS ((rtx));
static int c4x_R_indirect PARAMS ((rtx));
static int c4x_S_indirect PARAMS ((rtx));
static void c4x_S_address_parse PARAMS ((rtx , int *, int *, int *, int *));
static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
				       enum machine_mode, int));
static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
static int c4x_r11_set_p PARAMS ((rtx));
static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes PARAMS ((tree, tree *));
static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static void c4x_encode_section_info PARAMS ((tree, int));
static void c4x_globalize_label PARAMS ((FILE *, const char *));
197 
/* Initialize the GCC target structure.  */

/* Bytes are emitted with .word, and the 16/32-bit aligned ops are
   disabled — presumably because target bytes occupy a full machine
   word on this DSP; confirm against c4x.h.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO c4x_encode_section_info

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

/* The target hook vector; every hook not overridden above keeps its
   default from target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;
228 
229 /* Override command line options.
230    Called once after all options have been parsed.
231    Mostly we process the processor
232    type and sometimes adjust other TARGET_ options.  */
233 
234 void
c4x_override_options()235 c4x_override_options ()
236 {
237   if (c4x_rpts_cycles_string)
238     c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
239   else
240     c4x_rpts_cycles = 0;
241 
242   if (TARGET_C30)
243     c4x_cpu_version = 30;
244   else if (TARGET_C31)
245     c4x_cpu_version = 31;
246   else if (TARGET_C32)
247     c4x_cpu_version = 32;
248   else if (TARGET_C33)
249     c4x_cpu_version = 33;
250   else if (TARGET_C40)
251     c4x_cpu_version = 40;
252   else if (TARGET_C44)
253     c4x_cpu_version = 44;
254   else
255     c4x_cpu_version = 40;
256 
257   /* -mcpu=xx overrides -m40 etc.  */
258   if (c4x_cpu_version_string)
259     {
260       const char *p = c4x_cpu_version_string;
261 
262       /* Also allow -mcpu=c30 etc.  */
263       if (*p == 'c' || *p == 'C')
264 	p++;
265       c4x_cpu_version = atoi (p);
266     }
267 
268   target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
269 		    C40_FLAG | C44_FLAG);
270 
271   switch (c4x_cpu_version)
272     {
273     case 30: target_flags |= C30_FLAG; break;
274     case 31: target_flags |= C31_FLAG; break;
275     case 32: target_flags |= C32_FLAG; break;
276     case 33: target_flags |= C33_FLAG; break;
277     case 40: target_flags |= C40_FLAG; break;
278     case 44: target_flags |= C44_FLAG; break;
279     default:
280       warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
281       c4x_cpu_version = 40;
282       target_flags |= C40_FLAG;
283     }
284 
285   if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
286     target_flags |= C3X_FLAG;
287   else
288     target_flags &= ~C3X_FLAG;
289 
290   /* Convert foo / 8.0 into foo * 0.125, etc.  */
291   set_fast_math_flags (1);
292 
293   /* We should phase out the following at some stage.
294      This provides compatibility with the old -mno-aliases option.  */
295   if (! TARGET_ALIASES && ! flag_argument_noalias)
296     flag_argument_noalias = 1;
297 
298   /* We're C4X floating point, not IEEE floating point.  */
299   memset (real_format_for_mode, 0, sizeof real_format_for_mode);
300   real_format_for_mode[QFmode - QFmode] = &c4x_single_format;
301   real_format_for_mode[HFmode - QFmode] = &c4x_extended_format;
302 }
303 
304 
305 /* This is called before c4x_override_options.  */
306 
307 void
c4x_optimization_options(level,size)308 c4x_optimization_options (level, size)
309      int level ATTRIBUTE_UNUSED;
310      int size ATTRIBUTE_UNUSED;
311 {
312   /* Scheduling before register allocation can screw up global
313      register allocation, especially for functions that use MPY||ADD
314      instructions.  The benefit we gain we get by scheduling before
315      register allocation is probably marginal anyhow.  */
316   flag_schedule_insns = 0;
317 }
318 
319 
/* Write an ASCII string as a .byte directive.  Printable characters
   are collected into quoted strings of at most C4X_ASCII_LIMIT
   characters; everything else is emitted as a decimal number.  For
   the TI assembler (TARGET_TI), '"' and '\' are emitted numerically
   rather than escaped, and the output line is broken once it reaches
   80 columns.  */

#define C4X_ASCII_LIMIT 40

void
c4x_output_ascii (stream, ptr, len)
     FILE *stream;
     const char *ptr;
     int len;
{
  char sbuf[C4X_ASCII_LIMIT + 1];	/* Buffered printable chars.  */
  /* s = chars currently buffered, l = current output line length,
     first = no item emitted yet on this line (so no leading comma),
     onlys = the buffer was flushed only because it filled up.  */
  int s, l, special, first = 1, onlys;

  if (len)
      fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
	{
	  if (special)
	    sbuf[s++] = '\\';
	  sbuf[s++] = *ptr;
	  if (s < C4X_ASCII_LIMIT - 1)
	    continue;
	  /* Buffer full: flush it below, but skip the numeric path.  */
	  onlys = 1;
	}
      /* Flush the pending quoted string (buffer full, or the next
	 character must be emitted numerically).  */
      if (s)
	{
	  if (first)
	    first = 0;
	  else
	    {
	      fputc (',', stream);
	      l++;
	    }

	  sbuf[s] = 0;
	  fprintf (stream, "\"%s\"", sbuf);
	  l += s + 2;
	  /* Keep TI assembler lines under its line-length limit.  */
	  if (TARGET_TI && l >= 80 && len > 1)
	    {
	      fprintf (stream, "\n\t.byte\t");
	      first = 1;
	      l = 0;
	    }

	  s = 0;
	}
      if (onlys)
	continue;

      if (first)
	first = 0;
      else
	{
	  fputc (',', stream);
	  l++;
	}

      /* Non-printable character (or special char under TARGET_TI):
	 emit it as a decimal number.  */
      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
	{
	  fprintf (stream, "\n\t.byte\t");
	  first = 1;
	  l = 0;
	}
    }
  /* Flush any string still buffered at the end of the input.  */
  if (s)
    {
      if (! first)
	fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
406 
407 
/* Return nonzero if it is permissible to store a value of mode MODE
   in hard register REGNO.  */

int
c4x_hard_regno_mode_ok (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:			/* Pointer (24/32 bits).  */
#endif
    case QImode:		/* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:		/* Float, Double (32 bits).  */
    case HFmode:		/* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:		/* Condition Codes.  */
    case CC_NOOVmode:		/* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:		/* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
	 it is much easier to constrain the first register
	 to start on an even boundary.  */
      return IS_INT_REGNO (regno)
	&& IS_INT_REGNO (regno + 1)
	&& (regno & 1) == 0;

    default:
      return 0;			/* We don't support these modes.  */
    }

  return 0;			/* Not reached: all cases return above.  */
}
443 
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* Call saved registers are preserved either in mode QI or in mode
     QF; a register saved one way must not be renamed to a register
     saved the other way.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;

  /* The condition codes are only set for the extended (40 bit)
     registers, so an extended register and a standard (32 bit)
     register must never be interchanged.  */
  return (IS_EXT_REGNO (regno1) != 0) == (IS_EXT_REGNO (regno2) != 0);
}
465 
466 /* The TI C3x C compiler register argument runtime model uses 6 registers,
467    AR2, R2, R3, RC, RS, RE.
468 
469    The first two floating point arguments (float, double, long double)
470    that are found scanning from left to right are assigned to R2 and R3.
471 
472    The remaining integer (char, short, int, long) or pointer arguments
473    are assigned to the remaining registers in the order AR2, R2, R3,
474    RC, RS, RE when scanning left to right, except for the last named
475    argument prior to an ellipsis denoting variable number of
476    arguments.  We don't have to worry about the latter condition since
477    function.c treats the last named argument as anonymous (unnamed).
478 
479    All arguments that cannot be passed in registers are pushed onto
480    the stack in reverse order (right to left).  GCC handles that for us.
481 
482    c4x_init_cumulative_args() is called at the start, so we can parse
483    the args to see how many floating point arguments and how many
484    integer (or pointer) arguments there are.  c4x_function_arg() is
485    then called (sometimes repeatedly) for each argument (parsed left
486    to right) to obtain the register to pass the argument in, or zero
487    if the argument is to be passed on the stack.  Once the compiler is
488    happy, c4x_function_arg_advance() is called.
489 
490    Don't use R0 to pass arguments in, we use 0 to indicate a stack
491    argument.  */
492 
/* Integer/pointer argument registers.  The row index is the number of
   floating point arguments already assigned to registers (0-2, see
   c4x_function_arg which indexes by cum->maxfloats): each float
   claiming R2/R3 removes that register from the integer list.  */
static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

/* Registers for the first two floating point arguments.  */
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
501 
502 
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is  0.  */

void
c4x_init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* Argument info to initialize.  */
     tree fntype;		/* Tree ptr for function decl.  */
     rtx libname;		/* SYMBOL_REF of library name or 0.  */
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  /* Pre-scan the parameter list, counting how many float and how many
     integer/pointer arguments are candidates for register passing.
     c4x_function_arg () later converts these totals into maxfloats
     and maxints.  */
  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! MUST_PASS_IN_STACK (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}
580 
581 
582 /* Update the data in CUM to advance over an argument
583    of mode MODE and data type TYPE.
584    (TYPE is null for libcalls where that information may not be available.)  */
585 
586 void
c4x_function_arg_advance(cum,mode,type,named)587 c4x_function_arg_advance (cum, mode, type, named)
588      CUMULATIVE_ARGS *cum;	/* Current arg information.  */
589      enum machine_mode mode;	/* Current arg mode.  */
590      tree type;			/* Type of the arg or 0 if lib support.  */
591      int named;			/* Whether or not the argument was named.  */
592 {
593   if (TARGET_DEBUG)
594     fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
595 	     GET_MODE_NAME (mode), named);
596   if (! TARGET_MEMPARM
597       && named
598       && type
599       && ! MUST_PASS_IN_STACK (mode, type))
600     {
601       /* Look for float, double, or long double argument.  */
602       if (mode == QFmode || mode == HFmode)
603 	cum->floats++;
604       /* Look for integer, enumeral, boolean, char, or pointer argument.  */
605       else if (mode == QImode || mode == Pmode)
606 	cum->ints++;
607     }
608   else if (! TARGET_MEMPARM && ! type)
609     {
610       /* Handle libcall arguments.  */
611       if (mode == QFmode || mode == HFmode)
612 	cum->floats++;
613       else if (mode == QImode || mode == Pmode)
614 	cum->ints++;
615     }
616   return;
617 }
618 
619 
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* Current arg information.  */
     enum machine_mode mode;	/* Current arg mode.  */
     tree type;			/* Type of the arg or 0 if lib support.  */
     int named;			/* != 0 for normal args, == 0 for ... args.  */
{
  int reg = 0;			/* Default to passing argument on stack.  */

  /* On the first call, convert the counts gathered by
     c4x_init_cumulative_args () into the number of floats and ints
     that actually fit in registers, then restart the counters.  */
  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
713 
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle..  */
rtx
c4x_va_arg (valist, type)
     tree valist, type;
{
  tree t;

  /* Because arguments are stacked downwards, va_arg pre-decrements
     the arg pointer by the argument size; the decremented pointer is
     the address of the fetched argument.  */
  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
	     build_int_2 (int_size_in_bytes (type), 0));
  TREE_SIDE_EFFECTS (t) = 1;

  return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
}
728 
729 
730 static int
c4x_isr_reg_used_p(regno)731 c4x_isr_reg_used_p (regno)
732      unsigned int regno;
733 {
734   /* Don't save/restore FP or ST, we handle them separately.  */
735   if (regno == FRAME_POINTER_REGNUM
736       || IS_ST_REGNO (regno))
737     return 0;
738 
739   /* We could be a little smarter abut saving/restoring DP.
740      We'll only save if for the big memory model or if
741      we're paranoid. ;-)  */
742   if (IS_DP_REGNO (regno))
743     return ! TARGET_SMALL || TARGET_PARANOID;
744 
745   /* Only save/restore regs in leaf function that are used.  */
746   if (c4x_leaf_function)
747     return regs_ever_live[regno] && fixed_regs[regno] == 0;
748 
749   /* Only save/restore regs that are used by the ISR and regs
750      that are likely to be used by functions the ISR calls
751      if they are not fixed.  */
752   return IS_EXT_REGNO (regno)
753     || ((regs_ever_live[regno] || call_used_regs[regno])
754 	&& fixed_regs[regno] == 0);
755 }
756 
757 
758 static int
c4x_leaf_function_p()759 c4x_leaf_function_p ()
760 {
761   /* A leaf function makes no calls, so we only need
762      to save/restore the registers we actually use.
763      For the global variable leaf_function to be set, we need
764      to define LEAF_REGISTERS and all that it entails.
765      Let's check ourselves...   */
766 
767   if (lookup_attribute ("leaf_pretend",
768 			TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
769     return 1;
770 
771   /* Use the leaf_pretend attribute at your own risk.  This is a hack
772      to speed up ISRs that call a function infrequently where the
773      overhead of saving and restoring the additional registers is not
774      warranted.  You must save and restore the additional registers
775      required by the called function.  Caveat emptor.  Here's enough
776      rope...  */
777 
778   if (leaf_function_p ())
779     return 1;
780 
781   return 0;
782 }
783 
784 
785 static int
c4x_assembler_function_p()786 c4x_assembler_function_p ()
787 {
788   tree type;
789 
790   type = TREE_TYPE (current_function_decl);
791   return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
792     || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
793 }
794 
795 
796 int
c4x_interrupt_function_p()797 c4x_interrupt_function_p ()
798 {
799   if (lookup_attribute ("interrupt",
800 			TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
801     return 1;
802 
803   /* Look for TI style c_intnn.  */
804   return current_function_name[0] == 'c'
805     && current_function_name[1] == '_'
806     && current_function_name[2] == 'i'
807     && current_function_name[3] == 'n'
808     && current_function_name[4] == 't'
809     && ISDIGIT (current_function_name[5])
810     && ISDIGIT (current_function_name[6]);
811 }
812 
813 void
c4x_expand_prologue()814 c4x_expand_prologue ()
815 {
816   unsigned int regno;
817   int size = get_frame_size ();
818   rtx insn;
819 
820   /* In functions where ar3 is not used but frame pointers are still
821      specified, frame pointers are not adjusted (if >= -O2) and this
822      is used so it won't needlessly push the frame pointer.  */
823   int dont_push_ar3;
824 
825   /* For __assembler__ function don't build a prologue.  */
826   if (c4x_assembler_function_p ())
827     {
828       return;
829     }
830 
831   /* For __interrupt__ function build specific prologue.  */
832   if (c4x_interrupt_function_p ())
833     {
834       c4x_leaf_function = c4x_leaf_function_p ();
835 
836       insn = emit_insn (gen_push_st ());
837       RTX_FRAME_RELATED_P (insn) = 1;
838       if (size)
839 	{
840           insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
841           RTX_FRAME_RELATED_P (insn) = 1;
842 	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
843 				       gen_rtx_REG (QImode, SP_REGNO)));
844           RTX_FRAME_RELATED_P (insn) = 1;
845 	  /* We require that an ISR uses fewer than 32768 words of
846 	     local variables, otherwise we have to go to lots of
847 	     effort to save a register, load it with the desired size,
848 	     adjust the stack pointer, and then restore the modified
849 	     register.  Frankly, I think it is a poor ISR that
850 	     requires more than 32767 words of local temporary
851 	     storage!  */
852 	  if (size > 32767)
853 	    error ("ISR %s requires %d words of local vars, max is 32767",
854 		   current_function_name, size);
855 
856 	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
857 				        gen_rtx_REG (QImode, SP_REGNO),
858 					GEN_INT (size)));
859           RTX_FRAME_RELATED_P (insn) = 1;
860 	}
861       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
862 	{
863 	  if (c4x_isr_reg_used_p (regno))
864 	    {
865 	      if (regno == DP_REGNO)
866 		{
867 		  insn = emit_insn (gen_push_dp ());
868                   RTX_FRAME_RELATED_P (insn) = 1;
869 		}
870 	      else
871 		{
872                   insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
873                   RTX_FRAME_RELATED_P (insn) = 1;
874 		  if (IS_EXT_REGNO (regno))
875 		    {
876                       insn = emit_insn (gen_pushqf
877 					(gen_rtx_REG (QFmode, regno)));
878                       RTX_FRAME_RELATED_P (insn) = 1;
879 		    }
880 		}
881 	    }
882 	}
883       /* We need to clear the repeat mode flag if the ISR is
884          going to use a RPTB instruction or uses the RC, RS, or RE
885          registers.  */
886       if (regs_ever_live[RC_REGNO]
887 	  || regs_ever_live[RS_REGNO]
888 	  || regs_ever_live[RE_REGNO])
889 	{
890           insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
891           RTX_FRAME_RELATED_P (insn) = 1;
892 	}
893 
894       /* Reload DP reg if we are paranoid about some turkey
895          violating small memory model rules.  */
896       if (TARGET_SMALL && TARGET_PARANOID)
897 	{
898           insn = emit_insn (gen_set_ldp_prologue
899 			    (gen_rtx_REG (QImode, DP_REGNO),
900 			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
901           RTX_FRAME_RELATED_P (insn) = 1;
902 	}
903     }
904   else
905     {
906       if (frame_pointer_needed)
907 	{
908 	  if ((size != 0)
909 	      || (current_function_args_size != 0)
910 	      || (optimize < 2))
911 	    {
912               insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
913               RTX_FRAME_RELATED_P (insn) = 1;
914 	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
915 				           gen_rtx_REG (QImode, SP_REGNO)));
916               RTX_FRAME_RELATED_P (insn) = 1;
917 	      dont_push_ar3 = 1;
918 	    }
919 	  else
920 	    {
921 	      /* Since ar3 is not used, we don't need to push it.  */
922 	      dont_push_ar3 = 1;
923 	    }
924 	}
925       else
926 	{
927 	  /* If we use ar3, we need to push it.   */
928 	  dont_push_ar3 = 0;
929 	  if ((size != 0) || (current_function_args_size != 0))
930 	    {
931 	      /* If we are omitting the frame pointer, we still have
932 	         to make space for it so the offsets are correct
933 	         unless we don't use anything on the stack at all.  */
934 	      size += 1;
935 	    }
936 	}
937 
938       if (size > 32767)
939 	{
940 	  /* Local vars are too big, it will take multiple operations
941 	     to increment SP.  */
942 	  if (TARGET_C3X)
943 	    {
944 	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
945 					   GEN_INT(size >> 16)));
946               RTX_FRAME_RELATED_P (insn) = 1;
947 	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
948 					     gen_rtx_REG (QImode, R1_REGNO),
949 					     GEN_INT(-16)));
950               RTX_FRAME_RELATED_P (insn) = 1;
951 	    }
952 	  else
953 	    {
954 	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
955 					   GEN_INT(size & ~0xffff)));
956               RTX_FRAME_RELATED_P (insn) = 1;
957 	    }
958 	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
959 				        gen_rtx_REG (QImode, R1_REGNO),
960 					GEN_INT(size & 0xffff)));
961           RTX_FRAME_RELATED_P (insn) = 1;
962 	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
963 				        gen_rtx_REG (QImode, SP_REGNO),
964 				        gen_rtx_REG (QImode, R1_REGNO)));
965           RTX_FRAME_RELATED_P (insn) = 1;
966 	}
967       else if (size != 0)
968 	{
969 	  /* Local vars take up less than 32767 words, so we can directly
970 	     add the number.  */
971 	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
972 				        gen_rtx_REG (QImode, SP_REGNO),
973 				        GEN_INT (size)));
974           RTX_FRAME_RELATED_P (insn) = 1;
975 	}
976 
977       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
978 	{
979 	  if (regs_ever_live[regno] && ! call_used_regs[regno])
980 	    {
981 	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
982 		{
983 		  if (TARGET_PRESERVE_FLOAT)
984 		    {
985                       insn = emit_insn (gen_pushqi
986 					(gen_rtx_REG (QImode, regno)));
987 		      RTX_FRAME_RELATED_P (insn) = 1;
988 		    }
989                   insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
990 		  RTX_FRAME_RELATED_P (insn) = 1;
991 		}
992 	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
993 		{
994                   insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
995 		  RTX_FRAME_RELATED_P (insn) = 1;
996 		}
997 	    }
998 	}
999     }
1000 }
1001 
1002 
/* Expand the function epilogue as RTL.  Mirrors c4x_expand_prologue:
   restore call-saved registers, unwind the local frame from SP, and
   emit the return jump.  Three shapes are produced: none beyond a
   bare return for __assembler__ functions, a full state restore for
   __interrupt__ functions, and the normal frame teardown otherwise.
   Every emitted insn is marked RTX_FRAME_RELATED_P for unwind info.  */
void
c4x_expand_epilogue()
{
  int regno;
  int jump = 0;			/* Nonzero => return via register R2.  */
  int dont_pop_ar3;		/* Nonzero => AR3 was not saved/needs no pop.  */
  rtx insn;
  int size = get_frame_size ();

  /* For __assembler__ function build no epilogue.  */
  if (c4x_assembler_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Pop the saved registers in the reverse order of the prologue
	 pushes.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
	         that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      /* Release the local frame and recover the saved frame pointer.  */
      if (size)
	{
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
				        gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Restore the status register saved by the prologue.  */
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Fetch the return address from AR3[-1] into R2 so we can
		 return through it after popping the frame.  */
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  GEN_INT(-1)))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
	         so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
	         pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are omitting the frame pointer, we still have
	         to make space for it so the offsets are correct
	         unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  */
	  if (TARGET_C3X)
	    {
	      /* C3x has no 32-bit immediate load; build the high part
		 with a load and an unsigned right shift by -16 (i.e. a
		 left shift of 16).  */
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT(-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
				        gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT(size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
				        gen_rtx_REG (QImode, SP_REGNO),
				        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
				        gen_rtx_REG (QImode, SP_REGNO),
				        GEN_INT(size)));
          RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  /* Return through the address previously loaded into R2.  */
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1200 
1201 
1202 int
c4x_null_epilogue_p()1203 c4x_null_epilogue_p ()
1204 {
1205   int regno;
1206 
1207   if (reload_completed
1208       && ! c4x_assembler_function_p ()
1209       && ! c4x_interrupt_function_p ()
1210       && ! current_function_calls_alloca
1211       && ! current_function_args_size
1212       && ! (optimize < 2)
1213       && ! get_frame_size ())
1214     {
1215       for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1216 	if (regs_ever_live[regno] && ! call_used_regs[regno]
1217 	    && (regno != AR3_REGNO))
1218 	  return 0;
1219       return 1;
1220     }
1221   return 0;
1222 }
1223 
1224 
/* Legitimize and possibly emit a move of OP1 into OP0 in MODE.
   Returns 1 when the move has been fully emitted here (caller must not
   emit the normal move pattern), 0 when OPERANDS[] have merely been
   adjusted and the caller should emit the standard pattern.  */
int
c4x_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  /* Memory-to-memory (or constant-to-memory) moves need the source in
     a register first, except for small "stik" constants being stored
     (and not pushed).  */
  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
	 address.  */
      op1 = XEXP (op1, 1);
    }
  else if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
	{
	  /* Alias analysis seems to do a better job if we force
	     constant addresses to memory after reload.  */
	  emit_insn (gen_load_immed_address (op0, op1));
	  return 1;
	}
      else
	{
	  /* Stick symbol or label address into the constant pool.  */
	  op1 = force_const_mem (Pmode, op1);
	}
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
	 constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  /* Same transformation for a symbolic destination address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  /* Mixed int/float subreg destination: rewrite the subreg to a QImode
     access and use the special store pattern that clobbers CC.  */
  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
	op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
	{
	  op0 = copy_rtx (op0);
	  PUT_MODE (op0, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Mixed int/float subreg source: symmetrical case with the load
     pattern.  */
  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
	op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
	{
	  op1 = copy_rtx (op1);
	  PUT_MODE (op1, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Integer constants that fit neither a 16-bit immediate nor a "high"
     constant need the multi-insn big-constant load.  */
  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
1372 
1373 
1374 void
c4x_emit_libcall(libcall,code,dmode,smode,noperands,operands)1375 c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
1376      rtx libcall;
1377      enum rtx_code code;
1378      enum machine_mode dmode;
1379      enum machine_mode smode;
1380      int noperands;
1381      rtx *operands;
1382 {
1383   rtx ret;
1384   rtx insns;
1385   rtx equiv;
1386 
1387   start_sequence ();
1388   switch (noperands)
1389     {
1390     case 2:
1391       ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
1392 				     operands[1], smode);
1393       equiv = gen_rtx (code, dmode, operands[1]);
1394       break;
1395 
1396     case 3:
1397       ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
1398 				     operands[1], smode, operands[2], smode);
1399       equiv = gen_rtx (code, dmode, operands[1], operands[2]);
1400       break;
1401 
1402     default:
1403       abort ();
1404     }
1405 
1406   insns = get_insns ();
1407   end_sequence ();
1408   emit_libcall_block (insns, operands[0], ret, equiv);
1409 }
1410 
1411 
/* Convenience wrapper for binary libcalls where the source and
   destination modes are the same: forwards to c4x_emit_libcall with
   three operands.  */
void
c4x_emit_libcall3 (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
1421 
1422 
1423 void
c4x_emit_libcall_mulhi(libcall,code,mode,operands)1424 c4x_emit_libcall_mulhi (libcall, code, mode, operands)
1425      rtx libcall;
1426      enum rtx_code code;
1427      enum machine_mode mode;
1428      rtx *operands;
1429 {
1430   rtx ret;
1431   rtx insns;
1432   rtx equiv;
1433 
1434   start_sequence ();
1435   ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
1436                                  operands[1], mode, operands[2], mode);
1437   equiv = gen_rtx_TRUNCATE (mode,
1438                    gen_rtx_LSHIFTRT (HImode,
1439                             gen_rtx_MULT (HImode,
1440                                      gen_rtx (code, HImode, operands[1]),
1441                                      gen_rtx (code, HImode, operands[2])),
1442                                      GEN_INT (32)));
1443   insns = get_insns ();
1444   end_sequence ();
1445   emit_libcall_block (insns, operands[0], ret, equiv);
1446 }
1447 
1448 
/* Set the SYMBOL_REF_FLAG for a function decl.  However, we do not
   yet use this info.  */
1451 
/* Target hook: mark the SYMBOL_REF of every FUNCTION_DECL by setting
   SYMBOL_REF_FLAG on its DECL_RTL address.  Non-function decls are
   left untouched.  FIRST is unused.  */
static void
c4x_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
}
1460 
1461 
/* Return nonzero if ADDR is a legitimate memory address for MODE.
   Decomposes ADDR into an optional base register, index register, and
   constant displacement, then validates each part.  STRICT nonzero
   means hard-register constraints apply (post-reload); otherwise
   pseudos are acceptable as base/index.  Implements GO_IF_LEGITIMATE_ADDRESS.  */
int
c4x_check_legit_addr (mode, addr, strict)
     enum machine_mode mode;
     rtx addr;
     int strict;
{
  rtx base = NULL_RTX;		/* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;		/* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;		/* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
	 allow SP here---push_operand should recognize an operand
	 being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through to extract the base register.  */

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
	return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The modification must be reg +/- (reg|const) with the same
	   register on both sides.  */
	if (! REG_P (op0)
	    || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
	  return 0;
	base = XEXP (op1, 0);
	if (base != op0)
	  return 0;
	if (REG_P (XEXP (op1, 1)))
	  indx = XEXP (op1, 1);
	else
	  disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);
	enum rtx_code code0 = GET_CODE (op0);

	switch (code0)
	  {
	  case REG:
	    if (REG_P (op1))
	      {
		base = op0;	/* Base + index.  */
		indx = op1;
		/* Swap if the operands arrived in the wrong roles
		   (index register first, or address register second).  */
		if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
		  {
		    base = op1;
		    indx = op0;
		  }
	      }
	    else
	      {
		base = op0;	/* Base + displacement.  */
		disp = op1;
	      }
	    break;

	  default:
	    return 0;
	  }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* HImode and HFmode direct memory references aren't truly
	   offsettable (consider case at end of data page).  We
	   probably get better code by loading a pointer and using an
	   indirect memory reference.  */
	if (mode == HImode || mode == HFmode)
	  return 0;

	if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
	  return 0;

	if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
	  return 1;

	if (GET_CODE (op1) == CONST)
	  return 1;
	return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
	return 1;
      /* These need to be converted to a LO_SUM (...).
	 LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
	return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
	return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
	return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
	return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
	return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
	return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
	return 0;
      if (mode == HImode || mode == HFmode)
	{
	  /* The offset displacement must be legitimate.  */
	  if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
	    return 0;
	}
      else
	{
	  if (! IS_DISP8_CONST (INTVAL (disp)))
	    return 0;
	}
      /* Can't add an index with a disp.  */
      if (indx)
	return 0;
    }
  return 1;
}
1657 
1658 
1659 rtx
c4x_legitimize_address(orig,mode)1660 c4x_legitimize_address (orig, mode)
1661      rtx orig ATTRIBUTE_UNUSED;
1662      enum machine_mode mode ATTRIBUTE_UNUSED;
1663 {
1664   if (GET_CODE (orig) == SYMBOL_REF
1665       || GET_CODE (orig) == LABEL_REF)
1666     {
1667       if (mode == HImode || mode == HFmode)
1668 	{
1669 	  /* We need to force the address into
1670 	     a register so that it is offsettable.  */
1671 	  rtx addr_reg = gen_reg_rtx (Pmode);
1672 	  emit_move_insn (addr_reg, orig);
1673 	  return addr_reg;
1674 	}
1675       else
1676 	{
1677 	  rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1678 
1679 	  if (! TARGET_SMALL)
1680 	    emit_insn (gen_set_ldp (dp_reg, orig));
1681 
1682 	  return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1683 	}
1684     }
1685 
1686   return NULL_RTX;
1687 }
1688 
1689 
1690 /* Provide the costs of an addressing mode that contains ADDR.
1691    If ADDR is not a valid address, its cost is irrelevant.
1692    This is used in cse and loop optimisation to determine
1693    if it is worthwhile storing a common address into a register.
1694    Unfortunately, the C4x address cost depends on other operands.  */
1695 
/* Return the cost (1 = cheapest) of addressing mode ADDR, used by cse
   and loop optimization to decide whether a common address is worth
   keeping in a register.  Unknown or invalid forms cost 4.  */
int
c4x_address_cost (addr)
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
	rtx op1 = XEXP (addr, 1);

	/* Plain symbol/label: cheaper in the small memory model
	   where no DP load is needed.  */
	if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
	  return TARGET_SMALL ? 3 : 4;

	if (GET_CODE (op1) == CONST)
	  {
	    rtx offset = const0_rtx;

	    op1 = eliminate_constant_term (op1, &offset);

	    /* ??? These costs need rethinking...  */
	    if (GET_CODE (op1) == LABEL_REF)
	      return 3;

	    if (GET_CODE (op1) != SYMBOL_REF)
	      return 4;

	    if (INTVAL (offset) == 0)
	      return 3;

	    return 4;
	  }
	fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
	register rtx op0 = XEXP (addr, 0);
	register rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op0) != REG)
	  break;

	switch (GET_CODE (op1))
	  {
	  default:
	    break;

	  case REG:
	    /* This cost for REG+REG must be greater than the cost
	       for REG if we want autoincrement addressing modes.  */
	    return 2;

	  case CONST_INT:
	    /* The following tries to improve GIV combination
	       in strength reduce but appears not to help.  */
	    if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
	      return 1;

	    if (IS_DISP1_CONST (INTVAL (op1)))
	      return 1;

	    if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
	      return 2;

	    return 3;
	  }
      }
      /* Fall through: unrecognized PLUS forms get the default cost.  */
    default:
      break;
    }

  return 4;
}
1785 
1786 
1787 rtx
c4x_gen_compare_reg(code,x,y)1788 c4x_gen_compare_reg (code, x, y)
1789      enum rtx_code code;
1790      rtx x, y;
1791 {
1792   enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1793   rtx cc_reg;
1794 
1795   if (mode == CC_NOOVmode
1796       && (code == LE || code == GE || code == LT || code == GT))
1797     return NULL_RTX;
1798 
1799   cc_reg = gen_rtx_REG (mode, ST_REGNO);
1800   emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1801 			  gen_rtx_COMPARE (mode, x, y)));
1802   return cc_reg;
1803 }
1804 
1805 char *
c4x_output_cbranch(form,seq)1806 c4x_output_cbranch (form, seq)
1807      const char *form;
1808      rtx seq;
1809 {
1810   int delayed = 0;
1811   int annultrue = 0;
1812   int annulfalse = 0;
1813   rtx delay;
1814   char *cp;
1815   static char str[100];
1816 
1817   if (final_sequence)
1818     {
1819       delay = XVECEXP (final_sequence, 0, 1);
1820       delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1821       annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1822       annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1823     }
1824   strcpy (str, form);
1825   cp = &str [strlen (str)];
1826   if (delayed)
1827     {
1828       *cp++ = '%';
1829       *cp++ = '#';
1830     }
1831   if (annultrue)
1832     {
1833       *cp++ = 'a';
1834       *cp++ = 't';
1835     }
1836   if (annulfalse)
1837     {
1838       *cp++ = 'a';
1839       *cp++ = 'f';
1840     }
1841   *cp++ = '\t';
1842   *cp++ = '%';
1843   *cp++ = 'l';
1844   *cp++ = '1';
1845   *cp = 0;
1846   return str;
1847 }
1848 
/* Implement PRINT_OPERAND: write assembler syntax for OP to FILE.
   LETTER is the %-modifier character (or 0 for none).  Modifiers that
   fully handle the operand return early; the others fall through to
   the generic code-based output at the bottom.  */
void
c4x_print_operand (file, op, letter)
     FILE *file;		/* File to write to.  */
     rtx op;			/* Operand to print.  */
     int letter;		/* %<letter> or 0.  */
{
  rtx op1;
  enum rtx_code code;

  /* '#' only queries the delay-slot state; OP itself is not used.  */
  switch (letter)
    {
    case '#':			/* Delayed.  */
      if (final_sequence)
	fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':			/* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
	fprintf (file, "@");
      break;			/* Continue with generic output below.  */

    case 'H':			/* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':			/* Reversed condition.  */
      code = reverse_condition (code);
      break;			/* Print the reversed code below.  */

    case 'L':			/* Log 2 of constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':			/* Ones complement of small constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      /* NOTE(review): INTVAL is a HOST_WIDE_INT; "%d" assumes it fits
	 in int on this host — confirm for 64-bit hosts.  */
      fprintf (file, "%d", ~INTVAL (op));
      return;

    case 'K':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
	  && code == MEM
	  && GET_CODE (XEXP (op, 0)) == LO_SUM
	  && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
	{
	  op1 = XEXP (XEXP (op, 0), 1);
          if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
	    {
	      fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	      output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
	      fprintf (file, "\n");
	    }
	}
      return;

    case 'M':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL	/* Only used in asm statements.  */
	  && code == MEM
	  && (GET_CODE (XEXP (op, 0)) == CONST
	      || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
	{
	  fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
          output_address (XEXP (op, 0));
	  fprintf (file, "\n\t");
	}
      return;

    case 'O':			/* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
	break;
      else if (code == MEM)
	output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
	/* For a register pair, print the second (odd) register.  */
	fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
	fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':			/* Call.  */
      break;

    case 'U':			/* Call/callu.  */
      if (code != SYMBOL_REF)
	fprintf (file, "u");
      return;

    default:
      break;
    }

  /* Generic output keyed on the operand's rtx code.  */
  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && ! TARGET_TI)
	fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
	fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
	char str[64];

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
			 sizeof (str), 0, 1);
	fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      /* NOTE(review): same "%d" vs HOST_WIDE_INT width caveat as %N.  */
      fprintf (file, "%d", INTVAL (op));
      break;

      /* Condition codes, printed as TI assembler condition suffixes.  */
    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
2030 
2031 
/* Output to FILE the assembler syntax for the memory address ADDR
   (the address inside a MEM).  Handles register indirect,
   pre/post increment/decrement, pre/post modify with a register or
   constant displacement, base plus index or displacement,
   DP-relative (LO_SUM) direct addresses, and symbolic references.  */

void
c4x_print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:			/* Register indirect: *ARn.  */
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:		/* Predecrement: *--ARn.  */
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:		/* Postincrement: *ARn++.  */
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
	/* The modification is (plus/minus base (reg-or-const)).  */
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
		   INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
		   -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
	/* Same forms as POST_MODIFY but applied before the access.  */
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
		   INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
		   -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:		/* Preincrement: *++ARn.  */
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:		/* Postdecrement: *ARn--.  */
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:			/* Indirect with displacement.  */
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0))
	  {
	    if (REG_P (op1))
	      {
		/* Emit with the address register as the base and the
		   index register in parentheses, whichever way round
		   the operands arrived.  */
		if (IS_INDEX_REG (op0))
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op1)],
			     reg_names[REGNO (op0)]);	/* Index + base.  */
		  }
		else
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op0)],
			     reg_names[REGNO (op1)]);	/* Base + index.  */
		  }
	      }
	    else if (INTVAL (op1) < 0)
	      {
		fprintf (file, "*-%s(%d)",
			 reg_names[REGNO (op0)],
			 -INTVAL (op1));	/* Base - displacement.  */
	      }
	    else
	      {
		fprintf (file, "*+%s(%d)",
			 reg_names[REGNO (op0)],
			 INTVAL (op1));	/* Base + displacement.  */
	      }
	  }
	else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
	/* DP-relative direct address: print just the symbolic part;
	   the DP register is implicit.  */
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  c4x_print_operand_address (file, op1);
	else
          fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Direct addressing: @symbol.  */
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
2170 
2171 
2172 /* Return nonzero if the floating point operand will fit
2173    in the immediate field.  */
2174 
static int
c4x_immed_float_p (op)
     rtx op;
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  /* Convert OP to the target's binary float representation; HFmode
     uses the double layout, everything else the single layout with
     the second word zeroed.  */
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;			/* 0.0  */
  /* The immediate field keeps only the top mantissa bits; any bits
     in the low 12 bits of word 0 or in word 1 would be lost.  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;			/* Precision doesn't fit.  */
  return (exponent <= 7)	/* Positive exp.  */
    && (exponent >= -7);	/* Negative exp.  */
}
2201 
2202 
2203 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2204    CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2205 
2206    None of the last four instructions from the bottom of the block can
2207    be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2208    BcondAT or RETIcondD.
2209 
2210    This routine scans the four previous insns for a jump insn, and if
2211    one is found, returns 1 so that we bung in a nop instruction.
2212    This simple minded strategy will add a nop, when it may not
2213    be required.  Say when there is a JUMP_INSN near the end of the
2214    block that doesn't get converted into a delayed branch.
2215 
2216    Note that we cannot have a call insn, since we don't generate
2217    repeat loops with calls in them (although I suppose we could, but
2218    there's no benefit.)
2219 
2220    !!! FIXME.  The rptb_top insn may be sucked into a SEQUENCE.  */
2221 
int
c4x_rptb_nop_p (insn)
     rtx insn;
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
	   || GET_CODE (insn) == USE
	   || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Scan the last four real insns of the block; none of them may be
     a jump (see the restrictions described above).  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  /* Reaching the start label means the loop has fewer than
	     four real insns; only a completely empty loop (i == 0)
	     needs the NOP then.  */
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	};

      /* If we have a jump instruction we should insert a NOP. If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
2263 
2264 
2265 /* The C4x looping instruction needs to be emitted at the top of the
2266   loop.  Emitting the true RTL for a looping instruction at the top of
2267   the loop can cause problems with flow analysis.  So instead, a dummy
2268   doloop insn is emitted at the end of the loop.  This routine checks
2269   for the presence of this doloop insn and then searches back to the
2270   top of the loop, where it inserts the true looping insn (provided
2271   there are no instructions in the loop which would cause problems).
2272   Any additional labels can be emitted at this point.  In addition, if
2273   the desired loop count register was not allocated, this routine does
2274   nothing.
2275 
2276   Before we can create a repeat block looping instruction we have to
2277   verify that there are no jumps outside the loop and no jumps outside
2278   the loop go into this loop. This can happen in the basic blocks reorder
2279   pass. The C4x cpu can not handle this.  */
2280 
2281 static int
c4x_label_ref_used_p(x,code_label)2282 c4x_label_ref_used_p (x, code_label)
2283      rtx x, code_label;
2284 {
2285   enum rtx_code code;
2286   int i, j;
2287   const char *fmt;
2288 
2289   if (x == 0)
2290     return 0;
2291 
2292   code = GET_CODE (x);
2293   if (code == LABEL_REF)
2294     return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2295 
2296   fmt = GET_RTX_FORMAT (code);
2297   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2298     {
2299       if (fmt[i] == 'e')
2300 	{
2301           if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2302 	    return 1;
2303 	}
2304       else if (fmt[i] == 'E')
2305         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2306           if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2307 	    return 1;
2308     }
2309   return 0;
2310 }
2311 
2312 
/* Return nonzero if the loop delimited by the rptb_end insn INSN and
   START_LABEL is a valid candidate for a hardware repeat block:
   the start label must precede the end insn, no jump inside the
   block may target a label outside it, and no jump outside may
   target a label inside it.  */

static int
c4x_rptb_valid_p (insn, start_label)
     rtx insn, start_label;
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* Note found then we can not use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump jumps inside this block then we must fail.  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* A jump inside the block to a label before it would leave
	     the loop.  */
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
	      return 0;
        }
    }
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* Likewise for labels after the block.  */
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
	      return 0;
        }
    }
  /* If any jump jumps outside this block then we must fail.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* A label inside the block referenced by a jump outside it
	     (in either direction) means control can enter the loop
	     from outside.  */
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
                && c4x_label_ref_used_p (tmp, insn))
	      return 0;
        }
    }

  /* All checks OK.  */
  return 1;
}
2372 
2373 
void
c4x_rptb_insert (insn)
     rtx insn;
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movstr pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We can not use the rptb insn.  Replace it so reorg can use
         the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
      emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  /* Mark the end of the repeat block with a fresh label.  */
  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk back to the old start label, retargeting any jump to it at
     the replacement label.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	 break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Emit the real looping insn at the loop top; use the single-insn
     repeat (rpts) when the loop body allows it.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
2433 
2434 
2435 /* This function is a C4x special called immediately before delayed
2436    branch scheduling.  We fix up RTPB style loops that didn't get RC
2437    allocated as the loop counter.  */
2438 
void
c4x_process_after_reload (first)
     rtx first;
{
  rtx insn;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  /* Skip insns that don't match any pattern.  */
	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop.  */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here. Otherwise the calls to
	     force_const_mem will not work for load_immed_address.  */
	  old = insn;

	  /* Don't split the insn if it has been deleted.  */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing.  */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
2483 
2484 
2485 static int
c4x_a_register(op)2486 c4x_a_register (op)
2487      rtx op;
2488 {
2489   return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2490 }
2491 
2492 
2493 static int
c4x_x_register(op)2494 c4x_x_register (op)
2495      rtx op;
2496 {
2497   return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2498 }
2499 
2500 
2501 static int
c4x_immed_int_constant(op)2502 c4x_immed_int_constant (op)
2503      rtx op;
2504 {
2505   if (GET_CODE (op) != CONST_INT)
2506     return 0;
2507 
2508   return GET_MODE (op) == VOIDmode
2509     || GET_MODE_CLASS (op) == MODE_INT
2510     || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2511 }
2512 
2513 
2514 static int
c4x_immed_float_constant(op)2515 c4x_immed_float_constant (op)
2516      rtx op;
2517 {
2518   if (GET_CODE (op) != CONST_DOUBLE)
2519     return 0;
2520 
2521   /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2522      present this only means that a MEM rtx has been generated. It does
2523      not mean the rtx is really in memory.  */
2524 
2525   return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2526 }
2527 
2528 
2529 int
c4x_shiftable_constant(op)2530 c4x_shiftable_constant (op)
2531      rtx op;
2532 {
2533   int i;
2534   int mask;
2535   int val = INTVAL (op);
2536 
2537   for (i = 0; i < 16; i++)
2538     {
2539       if (val & (1 << i))
2540 	break;
2541     }
2542   mask = ((0xffff >> i) << 16) | 0xffff;
2543   if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2544 				      : (val >> i) & mask))
2545     return i;
2546   return -1;
2547 }
2548 
2549 
2550 int
c4x_H_constant(op)2551 c4x_H_constant (op)
2552      rtx op;
2553 {
2554   return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2555 }
2556 
2557 
2558 int
c4x_I_constant(op)2559 c4x_I_constant (op)
2560      rtx op;
2561 {
2562   return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2563 }
2564 
2565 
2566 int
c4x_J_constant(op)2567 c4x_J_constant (op)
2568      rtx op;
2569 {
2570   if (TARGET_C3X)
2571     return 0;
2572   return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2573 }
2574 
2575 
2576 static int
c4x_K_constant(op)2577 c4x_K_constant (op)
2578      rtx op;
2579 {
2580   if (TARGET_C3X || ! c4x_immed_int_constant (op))
2581     return 0;
2582   return IS_INT5_CONST (INTVAL (op));
2583 }
2584 
2585 
2586 int
c4x_L_constant(op)2587 c4x_L_constant (op)
2588      rtx op;
2589 {
2590   return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2591 }
2592 
2593 
2594 static int
c4x_N_constant(op)2595 c4x_N_constant (op)
2596      rtx op;
2597 {
2598   return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2599 }
2600 
2601 
2602 static int
c4x_O_constant(op)2603 c4x_O_constant (op)
2604      rtx op;
2605 {
2606   return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2607 }
2608 
2609 
2610 /* The constraints do not have to check the register class,
2611    except when needed to discriminate between the constraints.
2612    The operand has been checked by the predicates to be valid.  */
2613 
2614 /* ARx + 9-bit signed const or IRn
2615    *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2616    We don't include the pre/post inc/dec forms here since
2617    they are handled by the <> constraints.  */
2618 
int
c4x_Q_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);

  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:			/* *ARx  */
      return 1;

    case PLUS:			/* *+ARx(n) or *+ARx(IRn)  */
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	/* Register index: *+ARx(IRn).  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP8_OFF_CONST (INTVAL (op1));

	return IS_DISP8_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2660 
2661 
2662 /* ARx + 5-bit unsigned const
2663    *ARx, *+ARx(n) for n < 32.  */
2664 
int
c4x_R_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);

  /* This addressing form is a C4x feature only.  */
  if (TARGET_C3X)
    return 0;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:			/* *ARx  */
      return 1;

    case PLUS:			/* *+ARx(n), n < 32  */
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_UINT5_CONST (INTVAL (op1) + 1);

	return IS_UINT5_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2705 
2706 
2707 static int
c4x_R_indirect(op)2708 c4x_R_indirect (op)
2709      rtx op;
2710 {
2711   enum machine_mode mode = GET_MODE (op);
2712 
2713   if (TARGET_C3X || GET_CODE (op) != MEM)
2714     return 0;
2715 
2716   op = XEXP (op, 0);
2717   switch (GET_CODE (op))
2718     {
2719     case REG:
2720       return IS_ADDR_OR_PSEUDO_REG (op);
2721 
2722     case PLUS:
2723       {
2724 	rtx op0 = XEXP (op, 0);
2725 	rtx op1 = XEXP (op, 1);
2726 
2727 	/* HImode and HFmode must be offsettable.  */
2728 	if (mode == HImode || mode == HFmode)
2729 	  return IS_ADDR_OR_PSEUDO_REG (op0)
2730 	    && GET_CODE (op1) == CONST_INT
2731 	    && IS_UINT5_CONST (INTVAL (op1) + 1);
2732 
2733 	return REG_P (op0)
2734 	  && IS_ADDR_OR_PSEUDO_REG (op0)
2735 	  && GET_CODE (op1) == CONST_INT
2736 	  && IS_UINT5_CONST (INTVAL (op1));
2737       }
2738       break;
2739 
2740     default:
2741       break;
2742     }
2743   return 0;
2744 }
2745 
2746 
2747 /* ARx + 1-bit unsigned const or IRn
2748    *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2749    We don't include the pre/post inc/dec forms here since
2750    they are handled by the <> constraints.  */
2751 
int
c4x_S_constraint (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:			/* *ARx  */
      return 1;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* The modification must be (plus/minus base X) on the same
	   base register.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && REG_P (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }
      break;

    case PLUS:			/* *+ARx(1), *-ARx(1) or *+ARx(IRn)  */
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (!REG_P (op0))
	  return 0;

	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP1_OFF_CONST (INTVAL (op1));

	return IS_DISP1_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
2810 
2811 
/* Like c4x_S_constraint but additionally requires address registers
   (or pseudos) for the base and index.  */

static int
c4x_S_indirect (op)
     rtx op;
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      /* Decrement forms only allow single-word modes.  */
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through: OP is now the base register.  */

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The modification must be (plus/minus base X) on the same
	   base register.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable.  */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Base + index, in either operand order.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
2886 
2887 
2888 /* Direct memory operand.  */
2889 
2890 int
c4x_T_constraint(op)2891 c4x_T_constraint (op)
2892      rtx op;
2893 {
2894   if (GET_CODE (op) != MEM)
2895     return 0;
2896   op = XEXP (op, 0);
2897 
2898   if (GET_CODE (op) != LO_SUM)
2899     {
2900       /* Allow call operands.  */
2901       return GET_CODE (op) == SYMBOL_REF
2902 	&& GET_MODE (op) == Pmode
2903 	&& SYMBOL_REF_FLAG (op);
2904     }
2905 
2906   /* HImode and HFmode are not offsettable.  */
2907   if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2908     return 0;
2909 
2910   if ((GET_CODE (XEXP (op, 0)) == REG)
2911       && (REGNO (XEXP (op, 0)) == DP_REGNO))
2912     return c4x_U_constraint (XEXP (op, 1));
2913 
2914   return 0;
2915 }
2916 
2917 
2918 /* Symbolic operand.  */
2919 
2920 int
c4x_U_constraint(op)2921 c4x_U_constraint (op)
2922      rtx op;
2923 {
2924   /* Don't allow direct addressing to an arbitrary constant.  */
2925   return GET_CODE (op) == CONST
2926 	 || GET_CODE (op) == SYMBOL_REF
2927 	 || GET_CODE (op) == LABEL_REF;
2928 }
2929 
2930 
2931 int
c4x_autoinc_operand(op,mode)2932 c4x_autoinc_operand (op, mode)
2933      rtx op;
2934      enum machine_mode mode ATTRIBUTE_UNUSED;
2935 {
2936   if (GET_CODE (op) == MEM)
2937     {
2938       enum rtx_code code = GET_CODE (XEXP (op, 0));
2939 
2940       if (code == PRE_INC
2941 	  || code == PRE_DEC
2942 	  || code == POST_INC
2943 	  || code == POST_DEC
2944 	  || code == PRE_MODIFY
2945 	  || code == POST_MODIFY
2946 	  )
2947 	return 1;
2948     }
2949   return 0;
2950 }
2951 
2952 
2953 /* Match any operand.  */
2954 
2955 int
any_operand(op,mode)2956 any_operand (op, mode)
2957      register rtx op ATTRIBUTE_UNUSED;
2958      enum machine_mode mode ATTRIBUTE_UNUSED;
2959 {
2960   return 1;
2961 }
2962 
2963 
2964 /* Nonzero if OP is a floating point value with value 0.0.  */
2965 
2966 int
fp_zero_operand(op,mode)2967 fp_zero_operand (op, mode)
2968      rtx op;
2969      enum machine_mode mode ATTRIBUTE_UNUSED;
2970 {
2971   REAL_VALUE_TYPE r;
2972 
2973   if (GET_CODE (op) != CONST_DOUBLE)
2974     return 0;
2975   REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2976   return REAL_VALUES_EQUAL (r, dconst0);
2977 }
2978 
2979 
/* Nonzero if OP is a constant loadable as an immediate in MODE:
   an immediate-field float for QFmode/HFmode, or a 16-bit signed
   or `high' constant for QImode/Pmode.  HImode never matches.  */

int
const_operand (op, mode)
     register rtx op;
     register enum machine_mode mode;
{
  switch (mode)
    {
    case QFmode:
    case HFmode:
      if (GET_CODE (op) != CONST_DOUBLE
	  || GET_MODE (op) != mode
	  || GET_MODE_CLASS (mode) != MODE_FLOAT)
	return 0;

      return c4x_immed_float_p (op);

#if Pmode != QImode
    case Pmode:
#endif
    case QImode:
      /* CONSTANT_P_RTX is the placeholder rtx generated for
	 __builtin_constant_p.  */
      if (GET_CODE (op) == CONSTANT_P_RTX)
	return 1;

      if (GET_CODE (op) != CONST_INT
	  || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
	  || GET_MODE_CLASS (mode) != MODE_INT)
	return 0;

      return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));

    case HImode:
      return 0;

    default:
      return 0;
    }
}
3017 
3018 
3019 int
stik_const_operand(op,mode)3020 stik_const_operand (op, mode)
3021      rtx op;
3022      enum machine_mode mode ATTRIBUTE_UNUSED;
3023 {
3024   return c4x_K_constant (op);
3025 }
3026 
3027 
3028 int
not_const_operand(op,mode)3029 not_const_operand (op, mode)
3030      rtx op;
3031      enum machine_mode mode ATTRIBUTE_UNUSED;
3032 {
3033   return c4x_N_constant (op);
3034 }
3035 
3036 
3037 int
reg_operand(op,mode)3038 reg_operand (op, mode)
3039      rtx op;
3040      enum machine_mode mode;
3041 {
3042   if (GET_CODE (op) == SUBREG
3043       && GET_MODE (op) == QFmode)
3044     return 0;
3045   return register_operand (op, mode);
3046 }
3047 
3048 
3049 int
mixed_subreg_operand(op,mode)3050 mixed_subreg_operand (op, mode)
3051      rtx op;
3052      enum machine_mode mode ATTRIBUTE_UNUSED;
3053 {
3054   /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
3055      int and a long double.  */
3056   if (GET_CODE (op) == SUBREG
3057       && (GET_MODE (op) == QFmode)
3058       && (GET_MODE (SUBREG_REG (op)) == QImode
3059 	  || GET_MODE (SUBREG_REG (op)) == HImode))
3060     return 1;
3061   return 0;
3062 }
3063 
3064 
3065 int
reg_imm_operand(op,mode)3066 reg_imm_operand (op, mode)
3067      rtx op;
3068      enum machine_mode mode ATTRIBUTE_UNUSED;
3069 {
3070   if (REG_P (op) || CONSTANT_P (op))
3071     return 1;
3072   return 0;
3073 }
3074 
3075 
/* Nonzero if OP does not involve an auto-modified address register:
   a register, a constant, or a MEM whose address has no side
   effects.  */

int
not_modify_reg (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): no break here, so a PLUS whose second operand
	 is neither a register nor a CONST_INT falls through into the
	 LO_SUM test below — looks like a missing break; confirm
	 before relying on this path.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
3122 
3123 
3124 int
not_rc_reg(op,mode)3125 not_rc_reg (op, mode)
3126      rtx op;
3127      enum machine_mode mode ATTRIBUTE_UNUSED;
3128 {
3129   if (REG_P (op) && REGNO (op) == RC_REGNO)
3130     return 0;
3131   return 1;
3132 }
3133 
3134 
3135 /* Extended precision register R0-R1.  */
3136 
3137 int
r0r1_reg_operand(op,mode)3138 r0r1_reg_operand (op, mode)
3139      rtx op;
3140      enum machine_mode mode;
3141 {
3142   if (! reg_operand (op, mode))
3143     return 0;
3144   if (GET_CODE (op) == SUBREG)
3145     op = SUBREG_REG (op);
3146   return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3147 }
3148 
3149 
3150 /* Extended precision register R2-R3.  */
3151 
3152 int
r2r3_reg_operand(op,mode)3153 r2r3_reg_operand (op, mode)
3154      rtx op;
3155      enum machine_mode mode;
3156 {
3157   if (! reg_operand (op, mode))
3158     return 0;
3159   if (GET_CODE (op) == SUBREG)
3160     op = SUBREG_REG (op);
3161   return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3162 }
3163 
3164 
3165 /* Low extended precision register R0-R7.  */
3166 
3167 int
ext_low_reg_operand(op,mode)3168 ext_low_reg_operand (op, mode)
3169      rtx op;
3170      enum machine_mode mode;
3171 {
3172   if (! reg_operand (op, mode))
3173     return 0;
3174   if (GET_CODE (op) == SUBREG)
3175     op = SUBREG_REG (op);
3176   return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3177 }
3178 
3179 
3180 /* Extended precision register.  */
3181 
3182 int
ext_reg_operand(op,mode)3183 ext_reg_operand (op, mode)
3184      rtx op;
3185      enum machine_mode mode;
3186 {
3187   if (! reg_operand (op, mode))
3188     return 0;
3189   if (GET_CODE (op) == SUBREG)
3190     op = SUBREG_REG (op);
3191   if (! REG_P (op))
3192     return 0;
3193   return IS_EXT_OR_PSEUDO_REG (op);
3194 }
3195 
3196 
3197 /* Standard precision register.  */
3198 
3199 int
std_reg_operand(op,mode)3200 std_reg_operand (op, mode)
3201      rtx op;
3202      enum machine_mode mode;
3203 {
3204   if (! reg_operand (op, mode))
3205     return 0;
3206   if (GET_CODE (op) == SUBREG)
3207     op = SUBREG_REG (op);
3208   return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3209 }
3210 
3211 /* Standard precision or normal register.  */
3212 
3213 int
std_or_reg_operand(op,mode)3214 std_or_reg_operand (op, mode)
3215      rtx op;
3216      enum machine_mode mode;
3217 {
3218   if (reload_in_progress)
3219     return std_reg_operand (op, mode);
3220   return reg_operand (op, mode);
3221 }
3222 
3223 /* Address register.  */
3224 
3225 int
addr_reg_operand(op,mode)3226 addr_reg_operand (op, mode)
3227      rtx op;
3228      enum machine_mode mode;
3229 {
3230   if (! reg_operand (op, mode))
3231     return 0;
3232   return c4x_a_register (op);
3233 }
3234 
3235 
3236 /* Index register.  */
3237 
3238 int
index_reg_operand(op,mode)3239 index_reg_operand (op, mode)
3240      rtx op;
3241      enum machine_mode mode;
3242 {
3243   if (! reg_operand (op, mode))
3244     return 0;
3245   if (GET_CODE (op) == SUBREG)
3246     op = SUBREG_REG (op);
3247   return c4x_x_register (op);
3248 }
3249 
3250 
3251 /* DP register.  */
3252 
3253 int
dp_reg_operand(op,mode)3254 dp_reg_operand (op, mode)
3255      rtx op;
3256      enum machine_mode mode ATTRIBUTE_UNUSED;
3257 {
3258   return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3259 }
3260 
3261 
3262 /* SP register.  */
3263 
3264 int
sp_reg_operand(op,mode)3265 sp_reg_operand (op, mode)
3266      rtx op;
3267      enum machine_mode mode ATTRIBUTE_UNUSED;
3268 {
3269   return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3270 }
3271 
3272 
3273 /* ST register.  */
3274 
3275 int
st_reg_operand(op,mode)3276 st_reg_operand (op, mode)
3277      register rtx op;
3278      enum machine_mode mode ATTRIBUTE_UNUSED;
3279 {
3280   return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3281 }
3282 
3283 
3284 /* RC register.  */
3285 
3286 int
rc_reg_operand(op,mode)3287 rc_reg_operand (op, mode)
3288      register rtx op;
3289      enum machine_mode mode ATTRIBUTE_UNUSED;
3290 {
3291   return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3292 }
3293 
3294 
3295 int
call_address_operand(op,mode)3296 call_address_operand (op, mode)
3297      rtx op;
3298      enum machine_mode mode ATTRIBUTE_UNUSED;
3299 {
3300   return (REG_P (op) || symbolic_address_operand (op, mode));
3301 }
3302 
3303 
3304 /* Symbolic address operand.  */
3305 
3306 int
symbolic_address_operand(op,mode)3307 symbolic_address_operand (op, mode)
3308      register rtx op;
3309      enum machine_mode mode ATTRIBUTE_UNUSED;
3310 {
3311   switch (GET_CODE (op))
3312     {
3313     case CONST:
3314     case SYMBOL_REF:
3315     case LABEL_REF:
3316       return 1;
3317     default:
3318       return 0;
3319     }
3320 }
3321 
3322 
3323 /* Check dst operand of a move instruction.  */
3324 
3325 int
dst_operand(op,mode)3326 dst_operand (op, mode)
3327      rtx op;
3328      enum machine_mode mode;
3329 {
3330   if (GET_CODE (op) == SUBREG
3331       && mixed_subreg_operand (op, mode))
3332     return 0;
3333 
3334   if (REG_P (op))
3335     return reg_operand (op, mode);
3336 
3337   return nonimmediate_operand (op, mode);
3338 }
3339 
3340 
3341 /* Check src operand of two operand arithmetic instructions.  */
3342 
/* Return nonzero if OP is a valid source operand: a register, a
   suitably restricted constant, or a general operand — excluding
   symbolic addresses, and excluding direct memory references to
   symbolic addresses when TARGET_LOAD_DIRECT_MEMS is zero.  */

int
src_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Reject SUBREGs that mix incompatible modes.  */
  if (GET_CODE (op) == SUBREG
      && mixed_subreg_operand (op, mode))
    return 0;

  if (REG_P (op))
    return reg_operand (op, mode);

  /* With no mode given, take the mode from the operand itself.  */
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Integer immediates are only valid for integer/pointer modes and
     must satisfy the I constraint.  */
  if (GET_CODE (op) == CONST_INT)
    return (mode == QImode || mode == Pmode || mode == HImode)
      && c4x_I_constant (op);

  /* We don't like CONST_DOUBLE integers.  */
  if (GET_CODE (op) == CONST_DOUBLE)
    return c4x_H_constant (op);

  /* Disallow symbolic addresses.  Only the predicate
     symbolic_address_operand will match these.  */
  if (GET_CODE (op) == SYMBOL_REF
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == CONST)
    return 0;

  /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
     access to symbolic addresses.  These operands will get forced
     into a register and the movqi expander will generate a
     HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero.  */
  if (GET_CODE (op) == MEM
      && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	   || GET_CODE (XEXP (op, 0)) == LABEL_REF
	   || GET_CODE (XEXP (op, 0)) == CONST)))
    return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;

  return general_operand (op, mode);
}
3385 
3386 
3387 int
src_hi_operand(op,mode)3388 src_hi_operand (op, mode)
3389      rtx op;
3390      enum machine_mode mode;
3391 {
3392   if (c4x_O_constant (op))
3393     return 1;
3394   return src_operand (op, mode);
3395 }
3396 
3397 
3398 /* Check src operand of two operand logical instructions.  */
3399 
3400 int
lsrc_operand(op,mode)3401 lsrc_operand (op, mode)
3402      rtx op;
3403      enum machine_mode mode;
3404 {
3405   if (mode == VOIDmode)
3406     mode = GET_MODE (op);
3407 
3408   if (mode != QImode && mode != Pmode)
3409     fatal_insn ("mode not QImode", op);
3410 
3411   if (GET_CODE (op) == CONST_INT)
3412     return c4x_L_constant (op) || c4x_J_constant (op);
3413 
3414   return src_operand (op, mode);
3415 }
3416 
3417 
3418 /* Check src operand of two operand tricky instructions.  */
3419 
3420 int
tsrc_operand(op,mode)3421 tsrc_operand (op, mode)
3422      rtx op;
3423      enum machine_mode mode;
3424 {
3425   if (mode == VOIDmode)
3426     mode = GET_MODE (op);
3427 
3428   if (mode != QImode && mode != Pmode)
3429     fatal_insn ("mode not QImode", op);
3430 
3431   if (GET_CODE (op) == CONST_INT)
3432     return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3433 
3434   return src_operand (op, mode);
3435 }
3436 
3437 
3438 /* Check src operand of two operand non immedidate instructions.  */
3439 
3440 int
nonimmediate_src_operand(op,mode)3441 nonimmediate_src_operand (op, mode)
3442      rtx op;
3443      enum machine_mode mode;
3444 {
3445   if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3446     return 0;
3447 
3448   return src_operand (op, mode);
3449 }
3450 
3451 
3452 /* Check logical src operand of two operand non immedidate instructions.  */
3453 
3454 int
nonimmediate_lsrc_operand(op,mode)3455 nonimmediate_lsrc_operand (op, mode)
3456      rtx op;
3457      enum machine_mode mode;
3458 {
3459   if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3460     return 0;
3461 
3462   return lsrc_operand (op, mode);
3463 }
3464 
3465 
3466 int
reg_or_const_operand(op,mode)3467 reg_or_const_operand (op, mode)
3468      rtx op;
3469      enum machine_mode mode;
3470 {
3471   return reg_operand (op, mode) || const_operand (op, mode);
3472 }
3473 
3474 
3475 /* Check for indirect operands allowable in parallel instruction.  */
3476 
3477 int
par_ind_operand(op,mode)3478 par_ind_operand (op, mode)
3479      rtx op;
3480      enum machine_mode mode;
3481 {
3482   if (mode != VOIDmode && mode != GET_MODE (op))
3483     return 0;
3484 
3485   return c4x_S_indirect (op);
3486 }
3487 
3488 
3489 /* Check for operands allowable in parallel instruction.  */
3490 
3491 int
parallel_operand(op,mode)3492 parallel_operand (op, mode)
3493      rtx op;
3494      enum machine_mode mode;
3495 {
3496   return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3497 }
3498 
3499 
/* Decompose the address of MEM rtx OP into its components:
   *BASE   - regno of the base address register (0 if none recognized),
   *INCDEC - nonzero if the address has a side effect (auto inc/dec/modify),
   *INDEX  - regno of the index register, if any,
   *DISP   - constant displacement, or the pre/post adjustment amount.
   Calls fatal_insn if OP is not a MEM or is not one of the S-class
   indirect addressing forms.  */

static void
c4x_S_address_parse (op, base, incdec, index, disp)
     rtx op;
     int *base;
     int *incdec;
     int *index;
     int *disp;
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  /* Examine the address expression of the MEM.  */
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      /* Second operand of the inner PLUS is either an index register
	 or a constant adjustment.  */
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	  *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	  *disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      /* Plain register indirect: no side effect, no displacement.  */
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		/* base + index.  */
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		/* base + small constant displacement.  */
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    /* Commuted form: index + base.  */
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fallthrough.  */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
3606 
3607 
3608 int
c4x_address_conflict(op0,op1,store0,store1)3609 c4x_address_conflict (op0, op1, store0, store1)
3610      rtx op0;
3611      rtx op1;
3612      int store0;
3613      int store1;
3614 {
3615   int base0;
3616   int base1;
3617   int incdec0;
3618   int incdec1;
3619   int index0;
3620   int index1;
3621   int disp0;
3622   int disp1;
3623 
3624   if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3625     return 1;
3626 
3627   c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3628   c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3629 
3630   if (store0 && store1)
3631     {
3632       /* If we have two stores in parallel to the same address, then
3633 	 the C4x only executes one of the stores.  This is unlikely to
3634 	 cause problems except when writing to a hardware device such
3635 	 as a FIFO since the second write will be lost.  The user
3636 	 should flag the hardware location as being volatile so that
3637 	 we don't do this optimisation.  While it is unlikely that we
3638 	 have an aliased address if both locations are not marked
3639 	 volatile, it is probably safer to flag a potential conflict
3640 	 if either location is volatile.  */
3641       if (! flag_argument_noalias)
3642 	{
3643 	  if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3644 	    return 1;
3645 	}
3646     }
3647 
3648   /* If have a parallel load and a store to the same address, the load
3649      is performed first, so there is no conflict.  Similarly, there is
3650      no conflict if have parallel loads from the same address.  */
3651 
3652   /* Cannot use auto increment or auto decrement twice for same
3653      base register.  */
3654   if (base0 == base1 && incdec0 && incdec0)
3655     return 1;
3656 
3657   /* It might be too confusing for GCC if we have use a base register
3658      with a side effect and a memory reference using the same register
3659      in parallel.  */
3660   if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3661     return 1;
3662 
3663   /* We can not optimize the case where op1 and op2 refer to the same
3664      address.  */
3665   if (base0 == base1 && disp0 == disp1 && index0 == index1)
3666     return 1;
3667 
3668   /* No conflict.  */
3669   return 0;
3670 }
3671 
3672 
3673 /* Check for while loop inside a decrement and branch loop.  */
3674 
3675 int
c4x_label_conflict(insn,jump,db)3676 c4x_label_conflict (insn, jump, db)
3677      rtx insn;
3678      rtx jump;
3679      rtx db;
3680 {
3681   while (insn)
3682     {
3683       if (GET_CODE (insn) == CODE_LABEL)
3684 	{
3685           if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3686 	    return 1;
3687           if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3688 	    return 0;
3689 	}
3690       insn = PREV_INSN (insn);
3691     }
3692   return 1;
3693 }
3694 
3695 
3696 /* Validate combination of operands for parallel load/store instructions.  */
3697 
/* Return nonzero if OPERANDS describe a valid pair of moves
   (operands[0] = operands[1] || operands[2] = operands[3]) that can
   be packed into one parallel load/store instruction.  */

int
valid_parallel_load_store (operands, mode)
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  /* Strip SUBREG wrappers so the REG/MEM tests below see the
     underlying rtx.  */
  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op3) == SUBREG)
    op3 = SUBREG_REG (op3);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2 or op3, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
	  || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
    return 0;

  /* LDI||LDI.  Two loads: the destinations must be distinct registers
     and the two source addresses must not conflict.  */
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
    return (REGNO (op0) != REGNO (op2))
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op1, op3, 0, 0);

  /* STI||STI.  Two stores: the destination addresses must not
     conflict.  */
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op0, op2, 1, 1);

  /* LDI||STI.  */
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op1, op2, 0, 1);

  /* STI||LDI.  */
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op0, op3, 1, 0);

  /* Any other combination is not a load/store pair.  */
  return 0;
}
3752 
3753 
3754 int
valid_parallel_operands_4(operands,mode)3755 valid_parallel_operands_4 (operands, mode)
3756      rtx *operands;
3757      enum machine_mode mode ATTRIBUTE_UNUSED;
3758 {
3759   rtx op0 = operands[0];
3760   rtx op2 = operands[2];
3761 
3762   if (GET_CODE (op0) == SUBREG)
3763     op0 = SUBREG_REG (op0);
3764   if (GET_CODE (op2) == SUBREG)
3765     op2 = SUBREG_REG (op2);
3766 
3767   /* This test prevents the multipack pass from using this pattern if
3768      op0 is used as an index or base register in op2, since this combination
3769      will require reloading.  */
3770   if (GET_CODE (op0) == REG
3771       && GET_CODE (op2) == MEM
3772       && reg_mentioned_p (op0, XEXP (op2, 0)))
3773     return 0;
3774 
3775   return 1;
3776 }
3777 
3778 
3779 int
valid_parallel_operands_5(operands,mode)3780 valid_parallel_operands_5 (operands, mode)
3781      rtx *operands;
3782      enum machine_mode mode ATTRIBUTE_UNUSED;
3783 {
3784   int regs = 0;
3785   rtx op0 = operands[0];
3786   rtx op1 = operands[1];
3787   rtx op2 = operands[2];
3788   rtx op3 = operands[3];
3789 
3790   if (GET_CODE (op0) == SUBREG)
3791     op0 = SUBREG_REG (op0);
3792   if (GET_CODE (op1) == SUBREG)
3793     op1 = SUBREG_REG (op1);
3794   if (GET_CODE (op2) == SUBREG)
3795     op2 = SUBREG_REG (op2);
3796 
3797   /* The patterns should only allow ext_low_reg_operand() or
3798      par_ind_operand() operands.  Operands 1 and 2 may be commutative
3799      but only one of them can be a register.  */
3800   if (GET_CODE (op1) == REG)
3801     regs++;
3802   if (GET_CODE (op2) == REG)
3803     regs++;
3804 
3805   if (regs != 1)
3806     return 0;
3807 
3808   /* This test prevents the multipack pass from using this pattern if
3809      op0 is used as an index or base register in op3, since this combination
3810      will require reloading.  */
3811   if (GET_CODE (op0) == REG
3812       && GET_CODE (op3) == MEM
3813       && reg_mentioned_p (op0, XEXP (op3, 0)))
3814     return 0;
3815 
3816   return 1;
3817 }
3818 
3819 
3820 int
valid_parallel_operands_6(operands,mode)3821 valid_parallel_operands_6 (operands, mode)
3822      rtx *operands;
3823      enum machine_mode mode ATTRIBUTE_UNUSED;
3824 {
3825   int regs = 0;
3826   rtx op0 = operands[0];
3827   rtx op1 = operands[1];
3828   rtx op2 = operands[2];
3829   rtx op4 = operands[4];
3830   rtx op5 = operands[5];
3831 
3832   if (GET_CODE (op1) == SUBREG)
3833     op1 = SUBREG_REG (op1);
3834   if (GET_CODE (op2) == SUBREG)
3835     op2 = SUBREG_REG (op2);
3836   if (GET_CODE (op4) == SUBREG)
3837     op4 = SUBREG_REG (op4);
3838   if (GET_CODE (op5) == SUBREG)
3839     op5 = SUBREG_REG (op5);
3840 
3841   /* The patterns should only allow ext_low_reg_operand() or
3842      par_ind_operand() operands.  Thus of the 4 input operands, only 2
3843      should be REGs and the other 2 should be MEMs.  */
3844 
3845   if (GET_CODE (op1) == REG)
3846     regs++;
3847   if (GET_CODE (op2) == REG)
3848     regs++;
3849   if (GET_CODE (op4) == REG)
3850     regs++;
3851   if (GET_CODE (op5) == REG)
3852     regs++;
3853 
3854   /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3855      Perhaps we should count the MEMs as well?  */
3856   if (regs != 2)
3857     return 0;
3858 
3859   /* This test prevents the multipack pass from using this pattern if
3860      op0 is used as an index or base register in op4 or op5, since
3861      this combination will require reloading.  */
3862   if (GET_CODE (op0) == REG
3863       && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3864 	  || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3865     return 0;
3866 
3867   return 1;
3868 }
3869 
3870 
3871 /* Validate combination of src operands.  Note that the operands have
3872    been screened by the src_operand predicate.  We just have to check
3873    that the combination of operands is valid.  If FORCE is set, ensure
3874    that the destination regno is valid if we have a 2 operand insn.  */
3875 
static int
c4x_valid_operands (code, operands, mode, force)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int force;
{
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* A compare has no destination, so its two sources are operands 0
     and 1; every other insn puts the destination in operand 0.  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  /* Strip SUBREG wrappers so the code tests see the underlying rtx.  */
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  /* Register/register is always valid.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory operands must both be S-class indirect, or both be
     R-class indirect.  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* Any other like-coded pair (e.g. two constants) is invalid.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
  	case MEM:

	  /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	     into a stack slot memory address comprising a PLUS and a
	     constant.  */
	case ADDRESSOF:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
    }

  /* We assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  switch (code1)
    {
    case CONST_INT:
      if (c4x_J_constant (op1) && c4x_R_indirect (op2))
	return 1;
      break;

    case CONST_DOUBLE:
      if (! c4x_H_constant (op1))
	return 0;
      break;

      /* Any valid memory operand screened by src_operand is OK.  */
    case MEM:
#if 0
      if (code2 != REG)
	return 0;
#endif
      break;

      /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	 into a stack slot memory address comprising a PLUS and a
	 constant.  */
    case ADDRESSOF:
      break;

    default:
      abort ();
      break;
    }

  /* Check that we have a valid destination register for a two operand
     instruction.  NOTE(review): op1 is not a REG on this path, so
     applying REGNO to it looks suspect when FORCE is set — verify
     this is only reached with FORCE clear or with comparable rtxes.  */
  return ! force || REGNO (op1) == REGNO (operands[0]);
}
3997 
3998 
valid_operands(code,operands,mode)3999 int valid_operands (code, operands, mode)
4000      enum rtx_code code;
4001      rtx *operands;
4002      enum machine_mode mode;
4003 {
4004 
4005   /* If we are not optimizing then we have to let anything go and let
4006      reload fix things up.  instantiate_decl in function.c can produce
4007      invalid insns by changing the offset of a memory operand from a
4008      valid one into an invalid one, when the second operand is also a
4009      memory operand.  The alternative is not to allow two memory
4010      operands for an insn when not optimizing.  The problem only rarely
4011      occurs, for example with the C-torture program DFcmp.c.  */
4012 
4013   return ! optimize || c4x_valid_operands (code, operands, mode, 0);
4014 }
4015 
4016 
/* Massage OPERANDS for insn CODE in MODE so that the combination is
   valid: hoist expensive constants into registers, copy operands to
   make 2-operand forms legal, and negate shift counts for right
   shifts.  Always returns 1.  */

int
legitimize_operands (code, operands, mode)
     enum rtx_code code;
     rtx *operands;
     enum machine_mode mode;
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimisation enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      if (! reload_in_progress
          && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
         register, preventing reload from having to do this at a
         later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
4104 
4105 
4106 /* The following predicates are used for instruction scheduling.  */
4107 
4108 int
group1_reg_operand(op,mode)4109 group1_reg_operand (op, mode)
4110      rtx op;
4111      enum machine_mode mode;
4112 {
4113   if (mode != VOIDmode && mode != GET_MODE (op))
4114     return 0;
4115   if (GET_CODE (op) == SUBREG)
4116     op = SUBREG_REG (op);
4117   return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
4118 }
4119 
4120 
4121 int
group1_mem_operand(op,mode)4122 group1_mem_operand (op, mode)
4123      rtx op;
4124      enum machine_mode mode;
4125 {
4126   if (mode != VOIDmode && mode != GET_MODE (op))
4127     return 0;
4128 
4129   if (GET_CODE (op) == MEM)
4130     {
4131       op = XEXP (op, 0);
4132       if (GET_CODE (op) == PLUS)
4133 	{
4134 	  rtx op0 = XEXP (op, 0);
4135 	  rtx op1 = XEXP (op, 1);
4136 
4137 	  if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4138 	      || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
4139 	    return 1;
4140 	}
4141       else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
4142 	return 1;
4143     }
4144 
4145   return 0;
4146 }
4147 
4148 
4149 /* Return true if any one of the address registers.  */
4150 
4151 int
arx_reg_operand(op,mode)4152 arx_reg_operand (op, mode)
4153      rtx op;
4154      enum machine_mode mode;
4155 {
4156   if (mode != VOIDmode && mode != GET_MODE (op))
4157     return 0;
4158   if (GET_CODE (op) == SUBREG)
4159     op = SUBREG_REG (op);
4160   return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4161 }
4162 
4163 
4164 static int
c4x_arn_reg_operand(op,mode,regno)4165 c4x_arn_reg_operand (op, mode, regno)
4166      rtx op;
4167      enum machine_mode mode;
4168      unsigned int regno;
4169 {
4170   if (mode != VOIDmode && mode != GET_MODE (op))
4171     return 0;
4172   if (GET_CODE (op) == SUBREG)
4173     op = SUBREG_REG (op);
4174   return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4175 }
4176 
4177 
/* Return nonzero if OP is a MEM whose address uses hard register
   REGNO (as base, index, or side-effect register), or is still a
   pseudo that might get REGNO before reload completes.  MODE, if not
   VOIDmode, must match the mode of OP.  */

static int
c4x_arn_mem_operand (op, mode, regno)
     rtx op;
     enum machine_mode mode;
     unsigned int regno;
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      /* Examine the address expression of the MEM.  */
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  op = XEXP (op, 0);
	  /* Fall through to test the register itself.  */

	case REG:
          return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Either the base register or the modification's second
	     operand may be the register of interest.  */
          if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
          if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    /* Either side of the sum may be the register.  */
	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
	        || (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
4231 
4232 
/* Predicate: OP is (a SUBREG of) register AR0 once reload is done.  */

int
ar0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
4240 
4241 
/* Predicate: OP is a memory reference whose address uses AR0.  */

int
ar0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
4249 
4250 
/* Predicate: OP is (a SUBREG of) register AR1 once reload is done.  */

int
ar1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
4258 
4259 
/* Predicate: OP is a memory reference whose address uses AR1.  */

int
ar1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
4267 
4268 
/* Predicate: OP is (a SUBREG of) register AR2 once reload is done.  */

int
ar2_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
4276 
4277 
/* Predicate: OP is a memory reference whose address uses AR2.  */

int
ar2_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
4285 
4286 
/* Predicate: OP is (a SUBREG of) register AR3 once reload is done.  */

int
ar3_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
4294 
4295 
/* Predicate: OP is a memory reference whose address uses AR3.  */

int
ar3_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
4303 
4304 
/* Predicate: OP is (a SUBREG of) register AR4 once reload is done.  */

int
ar4_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
4312 
4313 
/* Predicate: OP is a memory reference whose address uses AR4.  */

int
ar4_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
4321 
4322 
/* Predicate: OP is (a SUBREG of) register AR5 once reload is done.  */

int
ar5_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
4330 
4331 
/* Predicate: OP is a memory reference whose address uses AR5.  */

int
ar5_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
4339 
4340 
/* Predicate: OP is (a SUBREG of) register AR6 once reload is done.  */

int
ar6_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
4348 
4349 
/* Predicate: OP is a memory reference whose address uses AR6.  */

int
ar6_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
4357 
4358 
/* Predicate: OP is (a SUBREG of) register AR7 once reload is done.  */

int
ar7_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
4366 
4367 
/* Predicate: OP is a memory reference whose address uses AR7.  */

int
ar7_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
4375 
4376 
/* Predicate: OP is (a SUBREG of) register IR0 once reload is done.  */

int
ir0_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
4384 
4385 
/* Predicate: OP is a memory reference whose address uses IR0.  */

int
ir0_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
4393 
4394 
/* Predicate: OP is (a SUBREG of) register IR1 once reload is done.  */

int
ir1_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
4402 
4403 
/* Predicate: OP is a memory reference whose address uses IR1.  */

int
ir1_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4411 
4412 
4413 /* This is similar to operand_subword but allows autoincrement
4414    addressing.  */
4415 
4416 rtx
c4x_operand_subword(op,i,validate_address,mode)4417 c4x_operand_subword (op, i, validate_address, mode)
4418      rtx op;
4419      int i;
4420      int validate_address;
4421      enum machine_mode mode;
4422 {
4423   if (mode != HImode && mode != HFmode)
4424     fatal_insn ("c4x_operand_subword: invalid mode", op);
4425 
4426   if (mode == HFmode && REG_P (op))
4427     fatal_insn ("c4x_operand_subword: invalid operand", op);
4428 
4429   if (GET_CODE (op) == MEM)
4430     {
4431       enum rtx_code code = GET_CODE (XEXP (op, 0));
4432       enum machine_mode mode = GET_MODE (XEXP (op, 0));
4433       enum machine_mode submode;
4434 
4435       submode = mode;
4436       if (mode == HImode)
4437 	submode = QImode;
4438       else if (mode == HFmode)
4439 	submode = QFmode;
4440 
4441       switch (code)
4442 	{
4443 	case POST_INC:
4444 	case PRE_INC:
4445 	  return gen_rtx_MEM (submode, XEXP (op, 0));
4446 
4447 	case POST_DEC:
4448 	case PRE_DEC:
4449 	case PRE_MODIFY:
4450 	case POST_MODIFY:
4451 	  /* We could handle these with some difficulty.
4452 	     e.g., *p-- => *(p-=2); *(p+1).  */
4453 	  fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4454 
4455 	case SYMBOL_REF:
4456 	case LABEL_REF:
4457 	case CONST:
4458 	case CONST_INT:
4459 	  fatal_insn ("c4x_operand_subword: invalid address", op);
4460 
4461 	  /* Even though offsettable_address_p considers (MEM
4462 	     (LO_SUM)) to be offsettable, it is not safe if the
4463 	     address is at the end of the data page since we also have
4464 	     to fix up the associated high PART.  In this case where
4465 	     we are trying to split a HImode or HFmode memory
4466 	     reference, we would have to emit another insn to reload a
4467 	     new HIGH value.  It's easier to disable LO_SUM memory references
4468 	     in HImode or HFmode and we probably get better code.  */
4469 	case LO_SUM:
4470 	  fatal_insn ("c4x_operand_subword: address not offsettable", op);
4471 
4472 	default:
4473 	  break;
4474 	}
4475     }
4476 
4477   return operand_subword (op, i, validate_address, mode);
4478 }
4479 
/* Singly-linked list node recording an assembler symbol name.  The
   name string is stored by reference, not copied.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Names declared global in this translation unit.  */
static struct name_list *global_head;
/* Names referenced but not declared global (emitted as .ref).  */
static struct name_list *extern_head;
4488 
4489 
4490 /* Add NAME to list of global symbols and remove from external list if
4491    present on external list.  */
4492 
4493 void
c4x_global_label(name)4494 c4x_global_label (name)
4495      const char *name;
4496 {
4497   struct name_list *p, *last;
4498 
4499   /* Do not insert duplicate names, so linearly search through list of
4500      existing names.  */
4501   p = global_head;
4502   while (p)
4503     {
4504       if (strcmp (p->name, name) == 0)
4505 	return;
4506       p = p->next;
4507     }
4508   p = (struct name_list *) xmalloc (sizeof *p);
4509   p->next = global_head;
4510   p->name = name;
4511   global_head = p;
4512 
4513   /* Remove this name from ref list if present.  */
4514   last = NULL;
4515   p = extern_head;
4516   while (p)
4517     {
4518       if (strcmp (p->name, name) == 0)
4519 	{
4520 	  if (last)
4521 	    last->next = p->next;
4522 	  else
4523 	    extern_head = p->next;
4524 	  break;
4525 	}
4526       last = p;
4527       p = p->next;
4528     }
4529 }
4530 
4531 
4532 /* Add NAME to list of external symbols.  */
4533 
4534 void
c4x_external_ref(name)4535 c4x_external_ref (name)
4536      const char *name;
4537 {
4538   struct name_list *p;
4539 
4540   /* Do not insert duplicate names.  */
4541   p = extern_head;
4542   while (p)
4543     {
4544       if (strcmp (p->name, name) == 0)
4545 	return;
4546       p = p->next;
4547     }
4548 
4549   /* Do not insert ref if global found.  */
4550   p = global_head;
4551   while (p)
4552     {
4553       if (strcmp (p->name, name) == 0)
4554 	return;
4555       p = p->next;
4556     }
4557   p = (struct name_list *) xmalloc (sizeof *p);
4558   p->next = extern_head;
4559   p->name = name;
4560   extern_head = p;
4561 }
4562 
4563 
4564 void
c4x_file_end(fp)4565 c4x_file_end (fp)
4566      FILE *fp;
4567 {
4568   struct name_list *p;
4569 
4570   /* Output all external names that are not global.  */
4571   p = extern_head;
4572   while (p)
4573     {
4574       fprintf (fp, "\t.ref\t");
4575       assemble_name (fp, p->name);
4576       fprintf (fp, "\n");
4577       p = p->next;
4578     }
4579   fprintf (fp, "\t.end\n");
4580 }
4581 
4582 
/* If DECL's name appears on LIST (a TREE_LIST keyed by identifier),
   prepend an ATTRIB attribute carrying that entry's value onto
   *ATTRIBUTES.  The name comparison is by pointer equality, which
   assumes identifiers are shared (interned) -- standard for GCC
   identifier nodes.  */

static void
c4x_check_attribute (attrib, list, decl, attributes)
     const char *attrib;
     tree list, decl, *attributes;
{
  while (list != NULL_TREE
         && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4596 
4597 
4598 static void
c4x_insert_attributes(decl,attributes)4599 c4x_insert_attributes (decl, attributes)
4600      tree decl, *attributes;
4601 {
4602   switch (TREE_CODE (decl))
4603     {
4604     case FUNCTION_DECL:
4605       c4x_check_attribute ("section", code_tree, decl, attributes);
4606       c4x_check_attribute ("const", pure_tree, decl, attributes);
4607       c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4608       c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4609       break;
4610 
4611     case VAR_DECL:
4612       c4x_check_attribute ("section", data_tree, decl, attributes);
4613       break;
4614 
4615     default:
4616       break;
4617     }
4618 }
4619 
/* Table of valid machine attributes.  All three attributes apply to
   function types and take no arguments.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  /* FIXME: code elsewhere in this file treats "naked" as a synonym of
     "interrupt"; should it be accepted here?  */
  { "assembler",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  /* Sentinel marking the end of the table.  */
  { NULL,           0, 0, false, false, false, NULL }
};
4631 
/* Handle an attribute requiring a FUNCTION_TYPE;
   arguments as in struct attribute_spec.handler.  Warn and suppress
   the attribute (via *NO_ADD_ATTRS) when it is applied to anything
   other than a function type.  Always returns NULL_TREE (no
   replacement attributes).  */
static tree
c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning ("`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
4651 
4652 
/* !!! FIXME to emit RPTS correctly.  */

/* Return nonzero if the repeat block following the RPTB insn INSN may
   be emitted as an RPTS instruction instead: the block must be the
   top label immediately followed by exactly one insn and then the
   rptb_end insn, and either TARGET_RPTS is set or the constant loop
   count OP passes the TARGET_RPTS_CYCLES threshold.  */

int
c4x_rptb_rpts_p (insn, op)
     rtx insn, op;
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
         and the top label... They were probably destined to
         be moved out of the loop.  For now, let's leave them
         where they are and print a warning.  We should
         probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* The loop body must be a single insn; the next real insn is
     required to be the rptb_end pattern.  */
  if (! INSN_P (insn))
    return 0;

  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  /* -mrpts requests RPTS unconditionally.  */
  if (TARGET_RPTS)
    return 1;

  /* Otherwise only use RPTS for small constant iteration counts.  */
  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4696 
4697 
/* Check if register r11 is used as the destination of an insn.  */

/* Recursively scan X (an insn or rtx expression) and return nonzero
   if register R11 appears within a set destination.  For a delay-slot
   SEQUENCE only the final insn is examined; for a single-set insn
   only the destination is scanned.  */

static int
c4x_r11_set_p(x)
    rtx x;
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a SEQUENCE, look at the last insn (the one in the slot).  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* Narrow a simple set to its destination rtx.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Walk every sub-rtx and sub-vector looking for an R11 reference.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
          if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4735 
4736 
4737 /* The c4x sometimes has a problem when the insn before the laj insn
4738    sets the r11 register.  Check for this situation.  */
4739 
4740 int
c4x_check_laj_p(insn)4741 c4x_check_laj_p (insn)
4742      rtx insn;
4743 {
4744   insn = prev_nonnote_insn (insn);
4745 
4746   /* If this is the start of the function no nop is needed.  */
4747   if (insn == 0)
4748     return 0;
4749 
4750   /* If the previous insn is a code label we have to insert a nop. This
4751      could be a jump or table jump. We can find the normal jumps by
4752      scanning the function but this will not find table jumps.  */
4753   if (GET_CODE (insn) == CODE_LABEL)
4754     return 1;
4755 
4756   /* If the previous insn sets register r11 we have to insert a nop.  */
4757   if (c4x_r11_set_p (insn))
4758     return 1;
4759 
4760   /* No nop needed.  */
4761   return 0;
4762 }
4763 
4764 
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.
   A set of an address register followed by a use incurs a 2 cycle
   stall (reduced to a single cycle on the c40 using LDA), while
   a read of an address register followed by a use incurs a single cycle.  */
4770 
/* Extra latencies (in cycles) charged between an insn touching an
   address/index register and a later insn that uses that register
   for addressing.  */
#define	SET_USE_COST	3
#define	SETLDA_USE_COST	2
#define	READ_USE_COST	2

/* Scheduler cost hook: return the adjusted cost of the dependency
   LINK of INSN on DEP_INSN, given the default COST from the md file.
   True data dependencies through an address/index register are
   raised to the pipeline-stall cost; anti and output dependencies
   cost nothing on this machine.  */

static int
c4x_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  /* Don't worry about this until we know what registers have been
     assigned.  */
  if (flag_schedule_insns == 0 && ! reload_completed)
    return 0;

  /* How do we handle dependencies where a read followed by another
     read causes a pipeline stall?  For example, a read of ar0 followed
     by the use of ar0 for a memory reference.  It looks like we
     need to extend the scheduler to handle this case.  */

  /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
     (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
     so only deal with insns we know about.  */
  if (recog_memoized (dep_insn) < 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      int max = 0;

      /* Data dependency; DEP_INSN writes a register that INSN reads some
	 cycles later.  MAX accumulates the worst stall found across
	 all registers checked below.  */
      if (TARGET_C3X)
	{
	  if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;
	}
      else
	{
	  /* This could be significantly optimized. We should look
	     to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
	     insn uses ar0-ar7.  We then test if the same register
	     is used.  The tricky bit is that some operands will
	     use several registers...  */
	  if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	  if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
	    max = READ_USE_COST > max ? READ_USE_COST : max;

	  if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;

	  if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SET_USE_COST > max ? SET_USE_COST : max;
	  if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
	    max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
	}

      if (max)
	cost = max;

      /* For other data dependencies, the default cost specified in the
	 md is correct.  */
      return cost;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
    {
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */

      /* For c4x anti dependencies, the cost is 0.  */
      return 0;
    }
  else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    {
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */

      /* For c4x output dependencies, the cost is 0.  */
      return 0;
    }
  else
    abort ();
}
4911 
/* Register the machine-specific builtins: fast_ftoi and ansi_ftoi
   always; fast_imult on the C3x; toieee, frieee and fast_invf on the
   C4x.  */

void
c4x_init_builtins ()
{
  tree endlink = void_list_node;

  /* int fast_ftoi (double).  */
  builtin_function ("fast_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
  /* int ansi_ftoi (double).  */
  builtin_function ("ansi_ftoi",
		    build_function_type
		    (integer_type_node,
		     tree_cons (NULL_TREE, double_type_node, endlink)),
		    C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
  if (TARGET_C3X)
    /* int fast_imult (int, int) -- C3x only.  */
    builtin_function ("fast_imult",
		      build_function_type
		      (integer_type_node,
		       tree_cons (NULL_TREE, integer_type_node,
				  tree_cons (NULL_TREE,
					     integer_type_node, endlink))),
		      C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
  else
    {
      /* double toieee (double) -- C4x only.  */
      builtin_function ("toieee",
		        build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
		        C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      /* double frieee (double) -- C4x only.  */
      builtin_function ("frieee",
		        build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
		        C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
      /* double fast_invf (double) -- C4x only.  */
      builtin_function ("fast_invf",
		        build_function_type
			(double_type_node,
			 tree_cons (NULL_TREE, double_type_node, endlink)),
		        C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
4954 
4955 
/* Expand a call EXP to one of the machine-specific builtins declared
   in c4x_init_builtins, generating RTL into TARGET (a fresh QImode or
   QFmode register is allocated when TARGET is unsuitable).  Return
   the result rtx, or NULL_RTX if the builtin is not available on the
   current subtarget.  */

rtx
c4x_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      /* fast_ftoi: float-to-int via the fixqfqi_clobber pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      /* ansi_ftoi: float-to-int via the standard truncation pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult: C3x-only 24-bit integer multiply.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* toieee: C4x-only conversion to IEEE format.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* frieee: C4x-only conversion from IEEE format.  The operand
	 must come from memory, so force register arguments into a
	 stack slot first.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0, /*rescan=*/true);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* fast_invf: C4x-only fast reciprocal.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  /* Unavailable on this subtarget; let the caller fall back.  */
  return NULL_RTX;
}
5047 
/* Output assembler code to switch to section NAME using the TI
   assembler's .sect directive; the section FLAGS are ignored.  */

static void
c4x_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
}
5055 
/* Globalize label NAME: emit the default directive to STREAM and also
   record the name on the global list so c4x_file_end does not emit a
   .ref for it.  */

static void
c4x_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
5064