1 /* Subroutines for assembler code output on the TMS320C[34]x
2 Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004
3 Free Software Foundation, Inc.
4
5 Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
6 and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
24
25 /* Some output-actions in c4x.md need these. */
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 #include "conditions.h"
39 #include "output.h"
40 #include "function.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "libfuncs.h"
44 #include "flags.h"
45 #include "loop.h"
46 #include "recog.h"
47 #include "ggc.h"
48 #include "cpplib.h"
49 #include "toplev.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
53
/* RTXs naming libgcc helper routines; these are initialized by
   c4x_init_libfuncs (declared below and registered as
   TARGET_INIT_LIBFUNCS) and used by expanders in c4x.md for
   operations with no direct machine instruction.  */
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

/* Nonzero if the current __interrupt__ function was judged to be a
   leaf; set by c4x_expand_prologue via c4x_leaf_function_p and
   consulted by c4x_isr_reg_used_p to trim the save/restore set.  */
static int c4x_leaf_function;

/* Register names used when printing float-register operands.  */
static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
68
/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,			/* R0           QI, QF, HF      No.  */
  R0R1_REGS,			/* R1           QI, QF, HF      No.  */
  R2R3_REGS,			/* R2           QI, QF, HF      No.  */
  R2R3_REGS,			/* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,			/* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,			/* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,			/* R7           QI, QF, HF      QF.  */
  ADDR_REGS,			/* AR0          QI              No.  */
  ADDR_REGS,			/* AR1          QI              No.  */
  ADDR_REGS,			/* AR2          QI              No.  */
  ADDR_REGS,			/* AR3          QI              QI.  */
  ADDR_REGS,			/* AR4          QI              QI.  */
  ADDR_REGS,			/* AR5          QI              QI.  */
  ADDR_REGS,			/* AR6          QI              QI.  */
  ADDR_REGS,			/* AR7          QI              QI.  */
  DP_REG,			/* DP           QI              No.  */
  INDEX_REGS,			/* IR0          QI              No.  */
  INDEX_REGS,			/* IR1          QI              No.  */
  BK_REG,			/* BK           QI              QI.  */
  SP_REG,			/* SP           QI              No.  */
  ST_REG,			/* ST           CC              No.  */
  NO_REGS,			/* DIE/IE                       No.  */
  NO_REGS,			/* IIE/IF                       No.  */
  NO_REGS,			/* IIF/IOF                      No.  */
  INT_REGS,			/* RS           QI              No.  */
  INT_REGS,			/* RE           QI              No.  */
  RC_REG,			/* RC           QI              No.  */
  EXT_REGS,			/* R8           QI, QF, HF      QI.  */
  EXT_REGS,			/* R9           QI, QF, HF      No.  */
  EXT_REGS,			/* R10          QI, QF, HF      No.  */
  EXT_REGS,			/* R11          QI, QF, HF      No.  */
};
110
/* Mode to use when saving/restoring each register around a call,
   indexed by REGNO; VOIDmode marks registers never saved this way.
   The table parallels c4x_regclass_map above.
   NOTE(review): presumably consulted by the caller-save machinery
   through a macro in c4x.h -- confirm against that header.  */

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,			/* R0           QI, QF, HF      No.  */
  HFmode,			/* R1           QI, QF, HF      No.  */
  HFmode,			/* R2           QI, QF, HF      No.  */
  HFmode,			/* R3           QI, QF, HF      No.  */
  QFmode,			/* R4           QI, QF, HF      QI.  */
  QFmode,			/* R5           QI, QF, HF      QI.  */
  QImode,			/* R6           QI, QF, HF      QF.  */
  QImode,			/* R7           QI, QF, HF      QF.  */
  QImode,			/* AR0          QI              No.  */
  QImode,			/* AR1          QI              No.  */
  QImode,			/* AR2          QI              No.  */
  QImode,			/* AR3          QI              QI.  */
  QImode,			/* AR4          QI              QI.  */
  QImode,			/* AR5          QI              QI.  */
  QImode,			/* AR6          QI              QI.  */
  QImode,			/* AR7          QI              QI.  */
  VOIDmode,			/* DP           QI              No.  */
  QImode,			/* IR0          QI              No.  */
  QImode,			/* IR1          QI              No.  */
  QImode,			/* BK           QI              QI.  */
  VOIDmode,			/* SP           QI              No.  */
  VOIDmode,			/* ST           CC              No.  */
  VOIDmode,			/* DIE/IE                       No.  */
  VOIDmode,			/* IIE/IF                       No.  */
  VOIDmode,			/* IIF/IOF                      No.  */
  QImode,			/* RS           QI              No.  */
  QImode,			/* RE           QI              No.  */
  VOIDmode,			/* RC           QI              No.  */
  QFmode,			/* R8           QI, QF, HF      QI.  */
  HFmode,			/* R9           QI, QF, HF      No.  */
  HFmode,			/* R10          QI, QF, HF      No.  */
  HFmode,			/* R11          QI, QF, HF      No.  */
};
147
148
/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

rtx c4x_compare_op0;
rtx c4x_compare_op1;

/* Raw option strings (set by the option-parsing machinery) and the
   values c4x_override_options derives from them.  */
const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;	/* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;	/* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

tree code_tree = NULL_TREE;
tree data_tree = NULL_TREE;
tree pure_tree = NULL_TREE;
tree noreturn_tree = NULL_TREE;
tree interrupt_tree = NULL_TREE;
tree naked_tree = NULL_TREE;
168
/* Forward declarations of the file-local helpers and target hooks
   defined below.  */
static int c4x_isr_reg_used_p (unsigned int);
static int c4x_leaf_function_p (void);
static int c4x_naked_function_p (void);
static int c4x_immed_float_p (rtx);
static int c4x_a_register (rtx);
static int c4x_x_register (rtx);
static int c4x_immed_int_constant (rtx);
static int c4x_immed_float_constant (rtx);
static int c4x_K_constant (rtx);
static int c4x_N_constant (rtx);
static int c4x_O_constant (rtx);
static int c4x_R_indirect (rtx);
static int c4x_S_indirect (rtx);
static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
static void c4x_file_start (void);
static void c4x_file_end (void);
static void c4x_check_attribute (const char *, tree, tree, tree *);
static int c4x_r11_set_p (rtx);
static int c4x_rptb_valid_p (rtx, rtx);
static void c4x_reorg (void);
static int c4x_label_ref_used_p (rtx, rtx);
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes (tree, tree *);
static void c4x_asm_named_section (const char *, unsigned int);
static int c4x_adjust_cost (rtx, rtx, rtx, int);
static void c4x_globalize_label (FILE *, const char *);
static bool c4x_rtx_costs (rtx, int, int, int *);
static int c4x_address_cost (rtx);
static void c4x_init_libfuncs (void);
203
/* Initialize the GCC target structure: override the default target
   hooks with the C[34]x-specific implementations defined in this
   file, then instantiate TARGETM.  */
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP NULL
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START c4x_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END c4x_file_end

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS c4x_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST c4x_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs

/* The one definition of the target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
248
249 /* Override command line options.
250 Called once after all options have been parsed.
251 Mostly we process the processor
252 type and sometimes adjust other TARGET_ options. */
253
254 void
c4x_override_options(void)255 c4x_override_options (void)
256 {
257 if (c4x_rpts_cycles_string)
258 c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
259 else
260 c4x_rpts_cycles = 0;
261
262 if (TARGET_C30)
263 c4x_cpu_version = 30;
264 else if (TARGET_C31)
265 c4x_cpu_version = 31;
266 else if (TARGET_C32)
267 c4x_cpu_version = 32;
268 else if (TARGET_C33)
269 c4x_cpu_version = 33;
270 else if (TARGET_C40)
271 c4x_cpu_version = 40;
272 else if (TARGET_C44)
273 c4x_cpu_version = 44;
274 else
275 c4x_cpu_version = 40;
276
277 /* -mcpu=xx overrides -m40 etc. */
278 if (c4x_cpu_version_string)
279 {
280 const char *p = c4x_cpu_version_string;
281
282 /* Also allow -mcpu=c30 etc. */
283 if (*p == 'c' || *p == 'C')
284 p++;
285 c4x_cpu_version = atoi (p);
286 }
287
288 target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
289 C40_FLAG | C44_FLAG);
290
291 switch (c4x_cpu_version)
292 {
293 case 30: target_flags |= C30_FLAG; break;
294 case 31: target_flags |= C31_FLAG; break;
295 case 32: target_flags |= C32_FLAG; break;
296 case 33: target_flags |= C33_FLAG; break;
297 case 40: target_flags |= C40_FLAG; break;
298 case 44: target_flags |= C44_FLAG; break;
299 default:
300 warning ("unknown CPU version %d, using 40.\n", c4x_cpu_version);
301 c4x_cpu_version = 40;
302 target_flags |= C40_FLAG;
303 }
304
305 if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
306 target_flags |= C3X_FLAG;
307 else
308 target_flags &= ~C3X_FLAG;
309
310 /* Convert foo / 8.0 into foo * 0.125, etc. */
311 set_fast_math_flags (1);
312
313 /* We should phase out the following at some stage.
314 This provides compatibility with the old -mno-aliases option. */
315 if (! TARGET_ALIASES && ! flag_argument_noalias)
316 flag_argument_noalias = 1;
317 }
318
319
/* This is called before c4x_override_options.  */

/* Adjust optimization defaults for the given -O LEVEL / -Os SIZE
   settings; both parameters are currently ignored because the one
   adjustment made here is unconditional.  */
void
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we gain we get by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}
332
333
/* Write an ASCII string.  */

/* Maximum number of characters emitted in a single quoted run.  */
#define C4X_ASCII_LIMIT 40

/* Emit LEN bytes at PTR to STREAM as one or more ".byte" directives.
   Printable characters are batched into quoted strings (at most
   C4X_ASCII_LIMIT chars per run); everything else is emitted as a
   decimal value.  For the TI assembler (TARGET_TI), quote/backslash
   are never placed inside a quoted run and output lines are broken
   at roughly 80 columns.  */
void
c4x_output_ascii (FILE *stream, const char *ptr, int len)
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  /* s = chars buffered in sbuf; l = current output-line length;
     first = no item emitted yet on this directive (so no comma);
     onlys = buffer was flushed only because it filled up, the
     current char is already in it.  */
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
	{
	  if (special)
	    sbuf[s++] = '\\';
	  sbuf[s++] = *ptr;
	  if (s < C4X_ASCII_LIMIT - 1)
	    continue;
	  /* Buffer full: fall through to flush it, but skip the
	     numeric-emission path below.  */
	  onlys = 1;
	}
      /* Flush any pending quoted run before emitting a raw value or
	 after the buffer filled.  */
      if (s)
	{
	  if (first)
	    first = 0;
	  else
	    {
	      fputc (',', stream);
	      l++;
	    }

	  sbuf[s] = 0;
	  fprintf (stream, "\"%s\"", sbuf);
	  l += s + 2;
	  if (TARGET_TI && l >= 80 && len > 1)
	    {
	      fprintf (stream, "\n\t.byte\t");
	      first = 1;
	      l = 0;
	    }

	  s = 0;
	}
      if (onlys)
	continue;

      if (first)
	first = 0;
      else
	{
	  fputc (',', stream);
	  l++;
	}

      /* Non-printable (or TI-special) character: emit as decimal.  */
      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
	{
	  fprintf (stream, "\n\t.byte\t");
	  first = 1;
	  l = 0;
	}
    }
  /* Flush any characters still buffered at end of input.  */
  if (s)
    {
      if (! first)
	fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}
417
418
419 int
c4x_hard_regno_mode_ok(unsigned int regno,enum machine_mode mode)420 c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
421 {
422 switch (mode)
423 {
424 #if Pmode != QImode
425 case Pmode: /* Pointer (24/32 bits). */
426 #endif
427 case QImode: /* Integer (32 bits). */
428 return IS_INT_REGNO (regno);
429
430 case QFmode: /* Float, Double (32 bits). */
431 case HFmode: /* Long Double (40 bits). */
432 return IS_EXT_REGNO (regno);
433
434 case CCmode: /* Condition Codes. */
435 case CC_NOOVmode: /* Condition Codes. */
436 return IS_ST_REGNO (regno);
437
438 case HImode: /* Long Long (64 bits). */
439 /* We need two registers to store long longs. Note that
440 it is much easier to constrain the first register
441 to start on an even boundary. */
442 return IS_INT_REGNO (regno)
443 && IS_INT_REGNO (regno + 1)
444 && (regno & 1) == 0;
445
446 default:
447 return 0; /* We don't support these modes. */
448 }
449
450 return 0;
451 }
452
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* A call-saved register holding a QI value cannot be renamed to one
     saved in QF, nor vice versa.  */
  if ((IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
      || (IS_INT_CALL_SAVED_REGNO (regno1)
	  && IS_FLOAT_CALL_SAVED_REGNO (regno2)))
    return 0;

  /* Renaming must also preserve "extendedness": condition codes are
     only set for the extended (40 bit) registers, so an extended and
     a standard (32 bit) register are never interchangeable.  */
  if (! IS_EXT_REGNO (regno1) != ! IS_EXT_REGNO (regno2))
    return 0;

  return 1;
}
472
/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

/* Integer argument registers; the row index is the number of float
   arguments already assigned to registers (0, 1, or 2), since each
   float consumes one of R2/R3 from the integer sequence.  A zero
   entry means "no register, pass on the stack".  */
static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

/* Floating point argument registers, in assignment order.  */
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
508
509
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   Scans the prototype's argument list (if any) to pre-count how many
   float and integer/pointer arguments are candidates for register
   passing; c4x_function_arg uses these counts on its first call to
   decide the register assignment.  */

void
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;		/* Lazy init flag read by c4x_function_arg.  */
  cum->var = 0;			/* Set if the function is variadic.  */
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);

	  fprintf (stderr, "fntype code = %s, ret code = %s",
		   tree_code_name[(int) TREE_CODE (fntype)],
		   tree_code_name[(int) TREE_CODE (ret_type)]);
	}
      else
	fprintf (stderr, "no fntype");

      if (libname)
	fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  /* Walk the declared argument types, counting register-passable
     floats and integers/pointers.  */
  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
	{
	  enum machine_mode mode;

	  /* If the last arg doesn't have void type then we have
	     variable arguments.  */
	  if (! next_param)
	    cum->var = 1;

	  if ((mode = TYPE_MODE (type)))
	    {
	      if (! MUST_PASS_IN_STACK (mode, type))
		{
		  /* Look for float, double, or long double argument.  */
		  if (mode == QFmode || mode == HFmode)
		    cum->floats++;
		  /* Look for integer, enumeral, boolean, char, or pointer
		     argument.  */
		  else if (mode == QImode || mode == Pmode)
		    cum->ints++;
		}
	    }
	  cum->args++;
	}
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
	     cum->prototype ? ", prototype" : "",
	     cum->var ? ", variable args" : "",
	     cum->args);
}
584
585
586 /* Update the data in CUM to advance over an argument
587 of mode MODE and data type TYPE.
588 (TYPE is null for libcalls where that information may not be available.) */
589
590 void
c4x_function_arg_advance(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named)591 c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
592 tree type, int named)
593 {
594 if (TARGET_DEBUG)
595 fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
596 GET_MODE_NAME (mode), named);
597 if (! TARGET_MEMPARM
598 && named
599 && type
600 && ! MUST_PASS_IN_STACK (mode, type))
601 {
602 /* Look for float, double, or long double argument. */
603 if (mode == QFmode || mode == HFmode)
604 cum->floats++;
605 /* Look for integer, enumeral, boolean, char, or pointer argument. */
606 else if (mode == QImode || mode == Pmode)
607 cum->ints++;
608 }
609 else if (! TARGET_MEMPARM && ! type)
610 {
611 /* Handle libcall arguments. */
612 if (mode == QFmode || mode == HFmode)
613 cum->floats++;
614 else if (mode == QImode || mode == Pmode)
615 cum->ints++;
616 }
617 return;
618 }
619
620
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
		  tree type, int named)
{
  int reg = 0;			/* Default to passing argument on stack.  */

  /* First call after c4x_init_cumulative_args: convert the pre-scan
     counts into the register budget, then restart counting.  */
  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
	 in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
	6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
	cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
	{
	  if (cum->floats < cum->maxfloats)
	    reg = c4x_fp_reglist[cum->floats];
	}
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
	{
	  if (cum->ints < cum->maxints)
	    reg = c4x_int_reglist[cum->maxfloats][cum->ints];
	}
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
	 since we're only calling functions in libgcc.  Thus we could
	 pass arguments for long longs in registers rather than on the
	 stack.  In the meantime, use the odd TI format.  We make the
	 assumption that we won't have more than two floating point
	 args, six integer args, and that all the arguments are of the
	 same mode.  */
      if (mode == QFmode || mode == HFmode)
	reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
	reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
	       GET_MODE_NAME (mode), named);
      if (reg)
	fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
	fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}
711
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle..  */

/* Implement va_arg for TYPE: pre-decrement the va_list pointer VALIST
   by the size in bytes of TYPE and return (as an rtx) the resulting
   address, since successive arguments live at decreasing addresses.  */
rtx
c4x_va_arg (tree valist, tree type)
{
  tree t;

  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
	     build_int_2 (int_size_in_bytes (type), 0));
  /* The decrement updates VALIST, so the expression has side effects.  */
  TREE_SIDE_EFFECTS (t) = 1;

  return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
}
725
726
/* Return nonzero if register REGNO must be saved and restored by an
   interrupt service routine's prologue/epilogue.  */
static int
c4x_isr_reg_used_p (unsigned int regno)
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter abut saving/restoring DP.
     We'll only save if for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
	&& fixed_regs[regno] == 0);
}
752
753
754 static int
c4x_leaf_function_p(void)755 c4x_leaf_function_p (void)
756 {
757 /* A leaf function makes no calls, so we only need
758 to save/restore the registers we actually use.
759 For the global variable leaf_function to be set, we need
760 to define LEAF_REGISTERS and all that it entails.
761 Let's check ourselves... */
762
763 if (lookup_attribute ("leaf_pretend",
764 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
765 return 1;
766
767 /* Use the leaf_pretend attribute at your own risk. This is a hack
768 to speed up ISRs that call a function infrequently where the
769 overhead of saving and restoring the additional registers is not
770 warranted. You must save and restore the additional registers
771 required by the called function. Caveat emptor. Here's enough
772 rope... */
773
774 if (leaf_function_p ())
775 return 1;
776
777 return 0;
778 }
779
780
781 static int
c4x_naked_function_p(void)782 c4x_naked_function_p (void)
783 {
784 tree type;
785
786 type = TREE_TYPE (current_function_decl);
787 return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
788 }
789
790
791 int
c4x_interrupt_function_p(void)792 c4x_interrupt_function_p (void)
793 {
794 const char *cfun_name;
795 if (lookup_attribute ("interrupt",
796 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
797 return 1;
798
799 /* Look for TI style c_intnn. */
800 cfun_name = current_function_name ();
801 return cfun_name[0] == 'c'
802 && cfun_name[1] == '_'
803 && cfun_name[2] == 'i'
804 && cfun_name[3] == 'n'
805 && cfun_name[4] == 't'
806 && ISDIGIT (cfun_name[5])
807 && ISDIGIT (cfun_name[6]);
808 }
809
/* Emit RTL for the current function's prologue.

   Three cases: a __naked__ function gets no prologue at all; an
   __interrupt__ function saves ST, AR3 and every register for which
   c4x_isr_reg_used_p is true, and clears the repeat-mode flag when
   the block-repeat registers are live; an ordinary function sets up
   AR3 as frame pointer when needed, advances SP past the locals, and
   pushes the live call-saved registers.  */
void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Record leafness for c4x_isr_reg_used_p below.  */
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
	{
	  /* Save AR3, point it at the incoming SP, then bump SP past
	     the locals.  */
	  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
				       gen_rtx_REG (QImode, SP_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* We require that an ISR uses fewer than 32768 words of
	     local variables, otherwise we have to go to lots of
	     effort to save a register, load it with the desired size,
	     adjust the stack pointer, and then restore the modified
	     register.  Frankly, I think it is a poor ISR that
	     requires more than 32767 words of local temporary
	     storage!  */
	  if (size > 32767)
	    error ("ISR %s requires %d words of local vars, max is 32767",
		   current_function_name (), size);

	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Save every register the ISR (or its callees) may clobber;
	 extended registers are saved as both QI and QF to preserve
	 all 40 bits.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (c4x_isr_reg_used_p (regno))
	    {
	      if (regno == DP_REGNO)
		{
		  insn = emit_insn (gen_push_dp ());
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (IS_EXT_REGNO (regno))
		    {
		      insn = emit_insn (gen_pushqf
					(gen_rtx_REG (QFmode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	    }
	}
      /* We need to clear the repeat mode flag if the ISR is
	 going to use a RPTB instruction or uses the RC, RS, or RE
	 registers.  */
      if (regs_ever_live[RC_REGNO]
	  || regs_ever_live[RS_REGNO]
	  || regs_ever_live[RE_REGNO])
	{
	  /* 0x100 is the repeat-mode (RM) bit in ST.  */
	  insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Reload DP reg if we are paranoid about some turkey
	 violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
	{
	  insn = emit_insn (gen_set_ldp_prologue
			    (gen_rtx_REG (QImode, DP_REGNO),
			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Push old AR3 and establish it as the frame pointer.  */
	      insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
					   gen_rtx_REG (QImode, SP_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dont_push_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used, we don't need to push it.  */
	      dont_push_ar3 = 1;
	    }
	}
      else
	{
	  /* If we use ar3, we need to push it.  */
	  dont_push_ar3 = 0;
	  if ((size != 0) || (current_function_args_size != 0))
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to increment SP.  Build the 32-bit size in R1 (high part
	     then OR in the low part) and add it to SP.  */
	  if (TARGET_C3X)
	    {
	      /* C3x has no direct 32-bit immediate load: load the high
		 half and shift it into place.  */
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
					     gen_rtx_REG (QImode, R1_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
					gen_rtx_REG (QImode, R1_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R1_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32767 words, so we can directly
	     add the number.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Push the live call-saved registers.  Float call-saved
	 registers are pushed as QF (and also as QI when
	 -mpreserve-float, to keep all 40 bits).  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_pushqi
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
		{
		  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}
    }
}
998
999
/* Emit RTL for the function epilogue, mirroring the prologue pushes.
   Three cases: a __naked__ function gets a bare return; an
   __interrupt__ function pops every ISR-used register and returns via
   the interrupt return pattern; a normal function pops the call-saved
   registers, deallocates the frame and returns.  */

void
c4x_expand_epilogue(void)
{
  int regno;
  int jump = 0;			/* Nonzero: return indirectly through R2.  */
  int dont_pop_ar3;		/* Nonzero: AR3 is restored some other way.  */
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Pop in the reverse order of the prologue pushes.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
		 that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      if (size)
	{
	  /* Deallocate the local frame, then restore the saved frame
	     pointer.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Load the return address from *(AR3 - 1) into R2 so we
		 can return indirectly once the frame is torn down.  */
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  GEN_INT(-1)))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
		 so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
		 pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
	 where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  /* The prologue pushed the integer view as well when
		     -mpreserve-float; pop it back.  */
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  */
	  if (TARGET_C3X)
	    {
	      /* C3x has no 16-bit-high immediate load, so build the
		 high part by load + logical shift.  */
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
					gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  insn = emit_jump_insn (gen_return_from_epilogue ());
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
1197
1198
1199 int
c4x_null_epilogue_p(void)1200 c4x_null_epilogue_p (void)
1201 {
1202 int regno;
1203
1204 if (reload_completed
1205 && ! c4x_naked_function_p ()
1206 && ! c4x_interrupt_function_p ()
1207 && ! current_function_calls_alloca
1208 && ! current_function_args_size
1209 && ! (optimize < 2)
1210 && ! get_frame_size ())
1211 {
1212 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1213 if (regs_ever_live[regno] && ! call_used_regs[regno]
1214 && (regno != AR3_REGNO))
1215 return 1;
1216 return 0;
1217 }
1218 return 1;
1219 }
1220
1221
/* Expand a move between OPERANDS[0] and OPERANDS[1] in mode MODE,
   legitimizing the operands as required by the C4x addressing rules.
   Return 1 if the move has been fully emitted here; return 0 if the
   caller should emit the normal move pattern (with any adjusted
   operands written back into OPERANDS).  */

int
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  /* A mem-to-mem move needs an intermediate register, except when
     pushing a small (stik) constant.  */
  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
	 address.  */
      op1 = XEXP (op1, 1);
    }
  else if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
	{
	  /* Alias analysis seems to do a better job if we force
	     constant addresses to memory after reload.  */
	  emit_insn (gen_load_immed_address (op0, op1));
	  return 1;
	}
      else
	{
	  /* Stick symbol or label address into the constant pool.  */
	  op1 = force_const_mem (Pmode, op1);
	}
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
	 constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  /* Likewise for a symbolic destination address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
	emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
			    gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
	op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
	{
	  op0 = copy_rtx (op0);
	  PUT_MODE (op0, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
	 during RTL generation.  If we need do it later on
	 then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
	abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
	op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
	{
	  op1 = copy_rtx (op1);
	  PUT_MODE (op1, QImode);
	}
      else
	abort ();

      if (mode == QFmode)
	emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
	abort ();
      return 1;
    }

  /* Integer constants that fit neither the 16-bit immediate form nor
     the 16-bit-high form need a multi-insn load sequence.  */
  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}
1367
1368
/* Emit a library call to LIBCALL with NOPERANDS-1 source operands of
   mode SMODE, storing the DMODE result in OPERANDS[0].  CODE is the
   rtx code the call implements; it is recorded as the libcall
   equivalence so the optimizers may treat the call like the plain
   operation.  */

void
c4x_emit_libcall (rtx libcall, enum rtx_code code,
		  enum machine_mode dmode, enum machine_mode smode,
		  int noperands, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:			/* Unary operation.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
				     operands[1], smode);
      equiv = gen_rtx (code, dmode, operands[1]);
      break;

    case 3:			/* Binary operation.  */
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
				     operands[1], smode, operands[2], smode);
      equiv = gen_rtx (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
1401
1402
/* Convenience wrapper: emit a binary (three-operand) library call
   where source and destination share the same mode MODE.  */

void
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
		   enum machine_mode mode, rtx *operands)
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}
1409
1410
/* Emit a library call to LIBCALL computing the high part of a
   widening multiply: the recorded equivalence is
   (truncate (lshiftrt (mult (CODE op1) (CODE op2)) 32)) in HImode,
   where CODE is presumably SIGN_EXTEND or ZERO_EXTEND — confirm
   against the callers that pass smulhi3/umulhi3 libfuncs.  */

void
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
			enum machine_mode mode, rtx *operands)
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
				 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
			    gen_rtx_LSHIFTRT (HImode,
					      gen_rtx_MULT (HImode,
							    gen_rtx (code, HImode, operands[1]),
							    gen_rtx (code, HImode, operands[2])),
					      GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}
1432
1433
/* Return nonzero if ADDR is a legitimate memory address for MODE.
   With STRICT nonzero, base and index registers must satisfy the
   hard-register class checks (REGNO_OK_FOR_BASE_P/INDEX_P); otherwise
   pseudo registers are also accepted.  */

int
c4x_check_legit_addr (enum machine_mode mode, rtx addr, int strict)
{
  rtx base = NULL_RTX;		/* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;		/* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;		/* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
	 allow SP here---push_operand should recognize an operand
	 being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through: validate the base register like POST_INC.  */

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
	return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	if (! REG_P (op0)
	    || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
	  return 0;
	/* The modified register must also be the first operand of the
	   PLUS/MINUS.  */
	base = XEXP (op1, 0);
	if (base != op0)
	  return 0;
	if (REG_P (XEXP (op1, 1)))
	  indx = XEXP (op1, 1);
	else
	  disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);
	enum rtx_code code0 = GET_CODE (op0);

	switch (code0)
	  {
	  case REG:
	    if (REG_P (op1))
	      {
		base = op0;	/* Base + index.  */
		indx = op1;
		/* Swap if the operands came in index-first order.  */
		if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
		  {
		    base = op1;
		    indx = op0;
		  }
	      }
	    else
	      {
		base = op0;	/* Base + displacement.  */
		disp = op1;
	      }
	    break;

	  default:
	    return 0;
	  }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* HImode and HFmode direct memory references aren't truly
	   offsettable (consider case at end of data page).  We
	   probably get better code by loading a pointer and using an
	   indirect memory reference.  */
	if (mode == HImode || mode == HFmode)
	  return 0;

	if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
	  return 0;

	if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
	  return 1;

	if (GET_CODE (op1) == CONST)
	  return 1;
	return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
	return 1;
      /* These need to be converted to a LO_SUM (...).
	 LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
	 This is more pain than it's worth, especially for the
	 small memory model where we can't guarantee that
	 this address is within the data page---we don't want
	 to modify the DP register in the small memory model,
	 even temporarily, since an interrupt can sneak in....  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
	return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
	return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
	return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
	return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
	return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
	return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
	return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
	return 0;
      if (mode == HImode || mode == HFmode)
	{
	  /* The offset displacement must be legitimate.  */
	  if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
	    return 0;
	}
      else
	{
	  if (! IS_DISP8_CONST (INTVAL (disp)))
	    return 0;
	}
      /* Can't add an index with a disp.  */
      if (indx)
	return 0;
    }
  return 1;
}
1626
1627
1628 rtx
c4x_legitimize_address(rtx orig ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED)1629 c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
1630 enum machine_mode mode ATTRIBUTE_UNUSED)
1631 {
1632 if (GET_CODE (orig) == SYMBOL_REF
1633 || GET_CODE (orig) == LABEL_REF)
1634 {
1635 if (mode == HImode || mode == HFmode)
1636 {
1637 /* We need to force the address into
1638 a register so that it is offsettable. */
1639 rtx addr_reg = gen_reg_rtx (Pmode);
1640 emit_move_insn (addr_reg, orig);
1641 return addr_reg;
1642 }
1643 else
1644 {
1645 rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
1646
1647 if (! TARGET_SMALL)
1648 emit_insn (gen_set_ldp (dp_reg, orig));
1649
1650 return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
1651 }
1652 }
1653
1654 return NULL_RTX;
1655 }
1656
1657
1658 /* Provide the costs of an addressing mode that contains ADDR.
1659 If ADDR is not a valid address, its cost is irrelevant.
1660 This is used in cse and loop optimization to determine
1661 if it is worthwhile storing a common address into a register.
1662 Unfortunately, the C4x address cost depends on other operands. */
1663
/* Cost of the addressing mode containing ADDR; smaller is cheaper.
   Plain register and autoincrement forms cost 1; reg+reg costs 2 (must
   exceed plain reg so autoincrement is preferred); displacement cost
   depends on its magnitude; symbolic forms are the most expensive.
   Unhandled shapes fall through to the default cost of 4.  */

static int
c4x_address_cost (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
	rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
	  return TARGET_SMALL ? 3 : 4;

	if (GET_CODE (op1) == CONST)
	  {
	    rtx offset = const0_rtx;

	    /* Strip the constant offset, leaving the bare symbol.  */
	    op1 = eliminate_constant_term (op1, &offset);

	    /* ??? These costs need rethinking...  */
	    if (GET_CODE (op1) == LABEL_REF)
	      return 3;

	    if (GET_CODE (op1) != SYMBOL_REF)
	      return 4;

	    if (INTVAL (offset) == 0)
	      return 3;

	    return 4;
	  }
	fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
	register rtx op0 = XEXP (addr, 0);
	register rtx op1 = XEXP (addr, 1);

	if (GET_CODE (op0) != REG)
	  break;

	switch (GET_CODE (op1))
	  {
	  default:
	    break;

	  case REG:
	    /* This cost for REG+REG must be greater than the cost
	       for REG if we want autoincrement addressing modes.  */
	    return 2;

	  case CONST_INT:
	    /* The following tries to improve GIV combination
	       in strength reduce but appears not to help.  */
	    if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
	      return 1;

	    if (IS_DISP1_CONST (INTVAL (op1)))
	      return 1;

	    if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
	      return 2;

	    return 3;
	  }
      }
    default:
      break;
    }

  return 4;
}
1752
1753
1754 rtx
c4x_gen_compare_reg(enum rtx_code code,rtx x,rtx y)1755 c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
1756 {
1757 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
1758 rtx cc_reg;
1759
1760 if (mode == CC_NOOVmode
1761 && (code == LE || code == GE || code == LT || code == GT))
1762 return NULL_RTX;
1763
1764 cc_reg = gen_rtx_REG (mode, ST_REGNO);
1765 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
1766 gen_rtx_COMPARE (mode, x, y)));
1767 return cc_reg;
1768 }
1769
1770 char *
c4x_output_cbranch(const char * form,rtx seq)1771 c4x_output_cbranch (const char *form, rtx seq)
1772 {
1773 int delayed = 0;
1774 int annultrue = 0;
1775 int annulfalse = 0;
1776 rtx delay;
1777 char *cp;
1778 static char str[100];
1779
1780 if (final_sequence)
1781 {
1782 delay = XVECEXP (final_sequence, 0, 1);
1783 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1784 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
1785 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
1786 }
1787 strcpy (str, form);
1788 cp = &str [strlen (str)];
1789 if (delayed)
1790 {
1791 *cp++ = '%';
1792 *cp++ = '#';
1793 }
1794 if (annultrue)
1795 {
1796 *cp++ = 'a';
1797 *cp++ = 't';
1798 }
1799 if (annulfalse)
1800 {
1801 *cp++ = 'a';
1802 *cp++ = 'f';
1803 }
1804 *cp++ = '\t';
1805 *cp++ = '%';
1806 *cp++ = 'l';
1807 *cp++ = '1';
1808 *cp = 0;
1809 return str;
1810 }
1811
/* Print operand OP to FILE.  LETTER selects a modifier:
   '#' delayed-branch marker, 'A' direct-address '@' prefix,
   'H' symbol output, 'I' reversed condition, 'L' log2 of a constant,
   'N' ones complement of a constant, 'K'/'M' emit an ldp/ldpk for a
   direct address, 'O' offset address, 'C' call, 'U' call/callu
   suffix.  With no (or an unhandled) modifier, OP is printed
   according to its rtx code.  */

void
c4x_print_operand (FILE *file, rtx op, int letter)
{
  rtx op1;
  enum rtx_code code;

  /* Punctuation-only modifiers: OP may not be valid, so check LETTER
     before looking at the rtx code.  */
  switch (letter)
    {
    case '#':			/* Delayed.  */
      if (final_sequence)
	fprintf (file, "d");
      return;
    }

  code = GET_CODE (op);
  switch (letter)
    {
    case 'A':			/* Direct address.  */
      if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
	fprintf (file, "@");
      break;

    case 'H':			/* Sethi.  */
      output_addr_const (file, op);
      return;

    case 'I':			/* Reversed condition.  */
      code = reverse_condition (code);
      break;

    case 'L':			/* Log 2 of constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%L inconsistency", op);
      fprintf (file, "%d", exact_log2 (INTVAL (op)));
      return;

    case 'N':			/* Ones complement of small constant.  */
      if (code != CONST_INT)
	fatal_insn ("c4x_print_operand: %%N inconsistency", op);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
      return;

    case 'K':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL
	  && code == MEM
	  && GET_CODE (XEXP (op, 0)) == LO_SUM
	  && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
	{
	  op1 = XEXP (XEXP (op, 0), 1);
	  if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
	    {
	      fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	      output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
	      fprintf (file, "\n");
	    }
	}
      return;

    case 'M':			/* Generate ldp(k) if direct address.  */
      if (! TARGET_SMALL	/* Only used in asm statements.  */
	  && code == MEM
	  && (GET_CODE (XEXP (op, 0)) == CONST
	      || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
	{
	  fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
	  output_address (XEXP (op, 0));
	  fprintf (file, "\n\t");
	}
      return;

    case 'O':			/* Offset address.  */
      if (code == MEM && c4x_autoinc_operand (op, Pmode))
	break;
      else if (code == MEM)
	output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
      else if (code == REG)
	fprintf (file, "%s", reg_names[REGNO (op) + 1]);
      else
	fatal_insn ("c4x_print_operand: %%O inconsistency", op);
      return;

    case 'C':			/* Call.  */
      break;

    case 'U':			/* Call/callu.  */
      if (code != SYMBOL_REF)
	fprintf (file, "u");
      return;

    default:
      break;
    }

  /* Generic output by rtx code; comparison codes print the C4x
     condition mnemonic.  */
  switch (code)
    {
    case REG:
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && ! TARGET_TI)
	fprintf (file, "%s", float_reg_names[REGNO (op)]);
      else
	fprintf (file, "%s", reg_names[REGNO (op)]);
      break;

    case MEM:
      output_address (XEXP (op, 0));
      break;

    case CONST_DOUBLE:
      {
	char str[64];

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
			 sizeof (str), 0, 1);
	fprintf (file, "%s", str);
      }
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
      break;

    case NE:
      fprintf (file, "ne");
      break;

    case EQ:
      fprintf (file, "eq");
      break;

    case GE:
      fprintf (file, "ge");
      break;

    case GT:
      fprintf (file, "gt");
      break;

    case LE:
      fprintf (file, "le");
      break;

    case LT:
      fprintf (file, "lt");
      break;

    case GEU:
      fprintf (file, "hs");
      break;

    case GTU:
      fprintf (file, "hi");
      break;

    case LEU:
      fprintf (file, "ls");
      break;

    case LTU:
      fprintf (file, "lo");
      break;

    case SYMBOL_REF:
      output_addr_const (file, op);
      break;

    case CONST:
      output_addr_const (file, XEXP (op, 0));
      break;

    case CODE_LABEL:
      break;

    default:
      fatal_insn ("c4x_print_operand: Bad operand case", op);
      break;
    }
}
1990
1991
/* Output the assembler syntax for memory address ADDR to FILE, using
   the C4x '*' indirect notation (e.g. *AR0, *++AR0(5), *+AR0(IR0)).
   Symbolic addresses get an '@' direct-address prefix.  */

void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:			/* Indirect with displacement.  */
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0))
	  {
	    if (REG_P (op1))
	      {
		if (IS_INDEX_REG (op0))
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op1)],
			     reg_names[REGNO (op0)]);	/* Index + base.  */
		  }
		else
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op0)],
			     reg_names[REGNO (op1)]);	/* Base + index.  */
		  }
	      }
	    else if (INTVAL (op1) < 0)
	      {
		fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 -INTVAL (op1));	/* Base - displacement.  */
	      }
	    else
	      {
		fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 INTVAL (op1));		/* Base + displacement.  */
	      }
	  }
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* DP-relative direct address: print only the symbolic part.  */
	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  c4x_print_operand_address (file, op1);
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
2128
2129
2130 /* Return nonzero if the floating point operand will fit
2131 in the immediate field. */
2132
/* Return nonzero if CONST_DOUBLE OP fits the short immediate float
   field: the biased exponent (bits 24-31 of the target image) must
   sign-extend into [-7, 7], and the low 12 mantissa bits (plus the
   entire second word for HFmode) must be zero.  An exponent of -128
   encodes 0.0 and always fits.  */

static int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  /* Convert to the target's binary image; for single-word modes only
     convval[0] is meaningful.  */
  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;			/* 0.0  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;			/* Precision doesn't fit.  */
  return (exponent <= 7)	/* Positive exp.  */
    && (exponent >= -7);	/* Negative exp.  */
}
2158
2159
2160 /* The last instruction in a repeat block cannot be a Bcond, DBcound,
2161 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2162
2163 None of the last four instructions from the bottom of the block can
2164 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2165 BcondAT or RETIcondD.
2166
2167 This routine scans the four previous insns for a jump insn, and if
2168 one is found, returns 1 so that we bung in a nop instruction.
2169 This simple minded strategy will add a nop, when it may not
2170 be required. Say when there is a JUMP_INSN near the end of the
2171 block that doesn't get converted into a delayed branch.
2172
2173 Note that we cannot have a call insn, since we don't generate
2174 repeat loops with calls in them (although I suppose we could, but
2175 there's no benefit.)
2176
2177 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
2178
/* Return nonzero if a NOP must be emitted before the rptb_end insn
   INSN: either a label sits at the very end of the repeat block, or a
   jump insn occurs within the last four real insns (see the
   restriction comment above).  */

int
c4x_rptb_nop_p (rtx insn)
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
	   || GET_CODE (insn) == USE
	   || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Scan the last four real insns of the block for a jump.  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  /* Reached the loop top: only an empty loop (i == 0) needs
	     the NOP.  */
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	};

      /* If we have a jump instruction we should insert a NOP.  If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
2219
2220
2221 /* The C4x looping instruction needs to be emitted at the top of the
2222 loop. Emitting the true RTL for a looping instruction at the top of
2223 the loop can cause problems with flow analysis. So instead, a dummy
2224 doloop insn is emitted at the end of the loop. This routine checks
2225 for the presence of this doloop insn and then searches back to the
2226 top of the loop, where it inserts the true looping insn (provided
2227 there are no instructions in the loop which would cause problems).
2228 Any additional labels can be emitted at this point. In addition, if
2229 the desired loop count register was not allocated, this routine does
2230 nothing.
2231
2232 Before we can create a repeat block looping instruction we have to
2233 verify that there are no jumps outside the loop and no jumps outside
2234 the loop go into this loop. This can happen in the basic blocks reorder
2235 pass. The C4x cpu can not handle this. */
2236
2237 static int
c4x_label_ref_used_p(rtx x,rtx code_label)2238 c4x_label_ref_used_p (rtx x, rtx code_label)
2239 {
2240 enum rtx_code code;
2241 int i, j;
2242 const char *fmt;
2243
2244 if (x == 0)
2245 return 0;
2246
2247 code = GET_CODE (x);
2248 if (code == LABEL_REF)
2249 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2250
2251 fmt = GET_RTX_FORMAT (code);
2252 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2253 {
2254 if (fmt[i] == 'e')
2255 {
2256 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2257 return 1;
2258 }
2259 else if (fmt[i] == 'E')
2260 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2261 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2262 return 1;
2263 }
2264 return 0;
2265 }
2266
2267
/* Return nonzero if the loop running from START_LABEL up to the
   rptb_end jump INSN may be turned into a hardware repeat block.
   The conversion is rejected if the start label cannot be found
   (e.g. it was moved by basic block reordering) or if any jump
   crosses the loop boundary in either direction, which the C4x
   repeat-block hardware cannot handle.  */

static int
c4x_rptb_valid_p (rtx insn, rtx start_label)
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label. */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* If not found then we can not use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass. */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump inside the loop targets a label located before the
     loop, then we must fail. */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* Likewise fail if any jump inside the loop targets a label located
     after the loop. */
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* If any jump outside the loop targets a label inside this block
     then we must fail. */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }

  /* All checks OK. */
  return 1;
}
2326
2327
/* Convert the dummy rptb_end jump INSN into a real repeat block: a
   rptb_top/rpts_top insn with a fresh start label at the top of the
   loop and a new end label after INSN.  If RC was not allocated as
   the loop counter, do nothing; if the loop cannot be a valid repeat
   block, emit a decrement-and-branch sequence instead.  */

void
c4x_rptb_insert (rtx insn)
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movstr pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop. */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end). */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We can not use the rptb insn.  Replace it so reorg can use
         the delay slots of the jump insn. */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
      emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Search back for the start label, redirecting any jump that used
     the old start label to the new one on the way.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Prefer the single-instruction RPTS form when the loop allows it.  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
2386
2387
2388 /* We need to use direct addressing for large constants and addresses
2389 that cannot fit within an instruction. We must check for these
2390 after after the final jump optimization pass, since this may
2391 introduce a local_move insn for a SYMBOL_REF. This pass
2392 must come before delayed branch slot filling since it can generate
2393 additional instructions.
2394
2395 This function also fixes up RTPB style loops that didn't get RC
2396 allocated as the loop counter. */
2397
/* Machine-dependent reorg pass: convert dummy rptb_end insns into
   real repeat blocks (see c4x_rptb_insert) and split every
   recognized insn so that force_const_mem works for
   load_immed_address before delayed branch scheduling runs.  */

static void
c4x_reorg (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn. */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  /* Skip insns that match no known pattern.  */
	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop. */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here. Otherwise the calls to
	     force_const_mem will not work for load_immed_address. */
	  old = insn;

	  /* Don't split the insn if it has been deleted. */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing. */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
2441
2442
2443 static int
c4x_a_register(rtx op)2444 c4x_a_register (rtx op)
2445 {
2446 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
2447 }
2448
2449
2450 static int
c4x_x_register(rtx op)2451 c4x_x_register (rtx op)
2452 {
2453 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
2454 }
2455
2456
2457 static int
c4x_immed_int_constant(rtx op)2458 c4x_immed_int_constant (rtx op)
2459 {
2460 if (GET_CODE (op) != CONST_INT)
2461 return 0;
2462
2463 return GET_MODE (op) == VOIDmode
2464 || GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
2465 || GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
2466 }
2467
2468
2469 static int
c4x_immed_float_constant(rtx op)2470 c4x_immed_float_constant (rtx op)
2471 {
2472 if (GET_CODE (op) != CONST_DOUBLE)
2473 return 0;
2474
2475 /* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
2476 present this only means that a MEM rtx has been generated. It does
2477 not mean the rtx is really in memory. */
2478
2479 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2480 }
2481
2482
2483 int
c4x_shiftable_constant(rtx op)2484 c4x_shiftable_constant (rtx op)
2485 {
2486 int i;
2487 int mask;
2488 int val = INTVAL (op);
2489
2490 for (i = 0; i < 16; i++)
2491 {
2492 if (val & (1 << i))
2493 break;
2494 }
2495 mask = ((0xffff >> i) << 16) | 0xffff;
2496 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2497 : (val >> i) & mask))
2498 return i;
2499 return -1;
2500 }
2501
2502
2503 int
c4x_H_constant(rtx op)2504 c4x_H_constant (rtx op)
2505 {
2506 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
2507 }
2508
2509
2510 int
c4x_I_constant(rtx op)2511 c4x_I_constant (rtx op)
2512 {
2513 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
2514 }
2515
2516
2517 int
c4x_J_constant(rtx op)2518 c4x_J_constant (rtx op)
2519 {
2520 if (TARGET_C3X)
2521 return 0;
2522 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
2523 }
2524
2525
2526 static int
c4x_K_constant(rtx op)2527 c4x_K_constant (rtx op)
2528 {
2529 if (TARGET_C3X || ! c4x_immed_int_constant (op))
2530 return 0;
2531 return IS_INT5_CONST (INTVAL (op));
2532 }
2533
2534
2535 int
c4x_L_constant(rtx op)2536 c4x_L_constant (rtx op)
2537 {
2538 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
2539 }
2540
2541
2542 static int
c4x_N_constant(rtx op)2543 c4x_N_constant (rtx op)
2544 {
2545 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
2546 }
2547
2548
2549 static int
c4x_O_constant(rtx op)2550 c4x_O_constant (rtx op)
2551 {
2552 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
2553 }
2554
2555
2556 /* The constraints do not have to check the register class,
2557 except when needed to discriminate between the constraints.
2558 The operand has been checked by the predicates to be valid. */
2559
2560 /* ARx + 9-bit signed const or IRn
2561 *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
2562 We don't include the pre/post inc/dec forms here since
2563 they are handled by the <> constraints. */
2564
2565 int
c4x_Q_constraint(rtx op)2566 c4x_Q_constraint (rtx op)
2567 {
2568 enum machine_mode mode = GET_MODE (op);
2569
2570 if (GET_CODE (op) != MEM)
2571 return 0;
2572 op = XEXP (op, 0);
2573 switch (GET_CODE (op))
2574 {
2575 case REG:
2576 return 1;
2577
2578 case PLUS:
2579 {
2580 rtx op0 = XEXP (op, 0);
2581 rtx op1 = XEXP (op, 1);
2582
2583 if (! REG_P (op0))
2584 return 0;
2585
2586 if (REG_P (op1))
2587 return 1;
2588
2589 if (GET_CODE (op1) != CONST_INT)
2590 return 0;
2591
2592 /* HImode and HFmode must be offsettable. */
2593 if (mode == HImode || mode == HFmode)
2594 return IS_DISP8_OFF_CONST (INTVAL (op1));
2595
2596 return IS_DISP8_CONST (INTVAL (op1));
2597 }
2598 break;
2599
2600 default:
2601 break;
2602 }
2603 return 0;
2604 }
2605
2606
2607 /* ARx + 5-bit unsigned const
2608 *ARx, *+ARx(n) for n < 32. */
2609
2610 int
c4x_R_constraint(rtx op)2611 c4x_R_constraint (rtx op)
2612 {
2613 enum machine_mode mode = GET_MODE (op);
2614
2615 if (TARGET_C3X)
2616 return 0;
2617 if (GET_CODE (op) != MEM)
2618 return 0;
2619 op = XEXP (op, 0);
2620 switch (GET_CODE (op))
2621 {
2622 case REG:
2623 return 1;
2624
2625 case PLUS:
2626 {
2627 rtx op0 = XEXP (op, 0);
2628 rtx op1 = XEXP (op, 1);
2629
2630 if (! REG_P (op0))
2631 return 0;
2632
2633 if (GET_CODE (op1) != CONST_INT)
2634 return 0;
2635
2636 /* HImode and HFmode must be offsettable. */
2637 if (mode == HImode || mode == HFmode)
2638 return IS_UINT5_CONST (INTVAL (op1) + 1);
2639
2640 return IS_UINT5_CONST (INTVAL (op1));
2641 }
2642 break;
2643
2644 default:
2645 break;
2646 }
2647 return 0;
2648 }
2649
2650
2651 static int
c4x_R_indirect(rtx op)2652 c4x_R_indirect (rtx op)
2653 {
2654 enum machine_mode mode = GET_MODE (op);
2655
2656 if (TARGET_C3X || GET_CODE (op) != MEM)
2657 return 0;
2658
2659 op = XEXP (op, 0);
2660 switch (GET_CODE (op))
2661 {
2662 case REG:
2663 return IS_ADDR_OR_PSEUDO_REG (op);
2664
2665 case PLUS:
2666 {
2667 rtx op0 = XEXP (op, 0);
2668 rtx op1 = XEXP (op, 1);
2669
2670 /* HImode and HFmode must be offsettable. */
2671 if (mode == HImode || mode == HFmode)
2672 return IS_ADDR_OR_PSEUDO_REG (op0)
2673 && GET_CODE (op1) == CONST_INT
2674 && IS_UINT5_CONST (INTVAL (op1) + 1);
2675
2676 return REG_P (op0)
2677 && IS_ADDR_OR_PSEUDO_REG (op0)
2678 && GET_CODE (op1) == CONST_INT
2679 && IS_UINT5_CONST (INTVAL (op1));
2680 }
2681 break;
2682
2683 default:
2684 break;
2685 }
2686 return 0;
2687 }
2688
2689
2690 /* ARx + 1-bit unsigned const or IRn
2691 *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-Arx(IRn)
2692 We don't include the pre/post inc/dec forms here since
2693 they are handled by the <> constraints. */
2694
2695 int
c4x_S_constraint(rtx op)2696 c4x_S_constraint (rtx op)
2697 {
2698 enum machine_mode mode = GET_MODE (op);
2699 if (GET_CODE (op) != MEM)
2700 return 0;
2701 op = XEXP (op, 0);
2702 switch (GET_CODE (op))
2703 {
2704 case REG:
2705 return 1;
2706
2707 case PRE_MODIFY:
2708 case POST_MODIFY:
2709 {
2710 rtx op0 = XEXP (op, 0);
2711 rtx op1 = XEXP (op, 1);
2712
2713 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2714 || (op0 != XEXP (op1, 0)))
2715 return 0;
2716
2717 op0 = XEXP (op1, 0);
2718 op1 = XEXP (op1, 1);
2719 return REG_P (op0) && REG_P (op1);
2720 /* Pre or post_modify with a displacement of 0 or 1
2721 should not be generated. */
2722 }
2723 break;
2724
2725 case PLUS:
2726 {
2727 rtx op0 = XEXP (op, 0);
2728 rtx op1 = XEXP (op, 1);
2729
2730 if (!REG_P (op0))
2731 return 0;
2732
2733 if (REG_P (op1))
2734 return 1;
2735
2736 if (GET_CODE (op1) != CONST_INT)
2737 return 0;
2738
2739 /* HImode and HFmode must be offsettable. */
2740 if (mode == HImode || mode == HFmode)
2741 return IS_DISP1_OFF_CONST (INTVAL (op1));
2742
2743 return IS_DISP1_CONST (INTVAL (op1));
2744 }
2745 break;
2746
2747 default:
2748 break;
2749 }
2750 return 0;
2751 }
2752
2753
/* Like c4x_S_constraint but additionally require address/index
   registers (or pseudos) and restrict the side-effect forms to
   QImode/QFmode.  */

static int
c4x_S_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through: check the (possibly unwrapped) base register.  */

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* The side effect must add to or subtract from the base
	   register itself.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated. */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable. */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Either addr + index or index + addr is acceptable.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
2827
2828
2829 /* Direct memory operand. */
2830
2831 int
c4x_T_constraint(rtx op)2832 c4x_T_constraint (rtx op)
2833 {
2834 if (GET_CODE (op) != MEM)
2835 return 0;
2836 op = XEXP (op, 0);
2837
2838 if (GET_CODE (op) != LO_SUM)
2839 {
2840 /* Allow call operands. */
2841 return GET_CODE (op) == SYMBOL_REF
2842 && GET_MODE (op) == Pmode
2843 && SYMBOL_REF_FUNCTION_P (op);
2844 }
2845
2846 /* HImode and HFmode are not offsettable. */
2847 if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
2848 return 0;
2849
2850 if ((GET_CODE (XEXP (op, 0)) == REG)
2851 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2852 return c4x_U_constraint (XEXP (op, 1));
2853
2854 return 0;
2855 }
2856
2857
2858 /* Symbolic operand. */
2859
2860 int
c4x_U_constraint(rtx op)2861 c4x_U_constraint (rtx op)
2862 {
2863 /* Don't allow direct addressing to an arbitrary constant. */
2864 return GET_CODE (op) == CONST
2865 || GET_CODE (op) == SYMBOL_REF
2866 || GET_CODE (op) == LABEL_REF;
2867 }
2868
2869
2870 int
c4x_autoinc_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2871 c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2872 {
2873 if (GET_CODE (op) == MEM)
2874 {
2875 enum rtx_code code = GET_CODE (XEXP (op, 0));
2876
2877 if (code == PRE_INC
2878 || code == PRE_DEC
2879 || code == POST_INC
2880 || code == POST_DEC
2881 || code == PRE_MODIFY
2882 || code == POST_MODIFY
2883 )
2884 return 1;
2885 }
2886 return 0;
2887 }
2888
2889
2890 /* Match any operand. */
2891
2892 int
any_operand(register rtx op ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED)2893 any_operand (register rtx op ATTRIBUTE_UNUSED,
2894 enum machine_mode mode ATTRIBUTE_UNUSED)
2895 {
2896 return 1;
2897 }
2898
2899
2900 /* Nonzero if OP is a floating point value with value 0.0. */
2901
2902 int
fp_zero_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2903 fp_zero_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2904 {
2905 REAL_VALUE_TYPE r;
2906
2907 if (GET_CODE (op) != CONST_DOUBLE)
2908 return 0;
2909 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2910 return REAL_VALUES_EQUAL (r, dconst0);
2911 }
2912
2913
/* Nonzero if OP is a constant usable as an immediate operand of
   mode MODE.  */

int
const_operand (register rtx op, register enum machine_mode mode)
{
  switch (mode)
    {
    case QFmode:
    case HFmode:
      /* Float constants must be exactly representable as a C4x
	 immediate float.  */
      if (GET_CODE (op) != CONST_DOUBLE
	  || GET_MODE (op) != mode
	  || GET_MODE_CLASS (mode) != MODE_FLOAT)
	return 0;

      return c4x_immed_float_p (op);

#if Pmode != QImode
    case Pmode:
#endif
    case QImode:
      if (GET_CODE (op) == CONSTANT_P_RTX)
	return 1;

      if (GET_CODE (op) != CONST_INT
	  || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
	  || GET_MODE_CLASS (mode) != MODE_INT)
	return 0;

      /* Accept a high-half constant or a 16-bit signed immediate.  */
      return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));

    case HImode:
      return 0;

    default:
      return 0;
    }
}
2949
2950
2951 int
stik_const_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2952 stik_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2953 {
2954 return c4x_K_constant (op);
2955 }
2956
2957
2958 int
not_const_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2959 not_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2960 {
2961 return c4x_N_constant (op);
2962 }
2963
2964
2965 int
reg_operand(rtx op,enum machine_mode mode)2966 reg_operand (rtx op, enum machine_mode mode)
2967 {
2968 if (GET_CODE (op) == SUBREG
2969 && GET_MODE (op) == QFmode)
2970 return 0;
2971 return register_operand (op, mode);
2972 }
2973
2974
2975 int
mixed_subreg_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2976 mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2977 {
2978 /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
2979 int and a long double. */
2980 if (GET_CODE (op) == SUBREG
2981 && (GET_MODE (op) == QFmode)
2982 && (GET_MODE (SUBREG_REG (op)) == QImode
2983 || GET_MODE (SUBREG_REG (op)) == HImode))
2984 return 1;
2985 return 0;
2986 }
2987
2988
2989 int
reg_imm_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2990 reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2991 {
2992 if (REG_P (op) || CONSTANT_P (op))
2993 return 1;
2994 return 0;
2995 }
2996
2997
/* Nonzero if OP is a register, a constant, or a MEM whose address
   involves no side-effect (auto-modify) addressing.  */

int
not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): there is no break here, so a PLUS whose second
	 operand is neither a register nor a CONST_INT falls through
	 into the LO_SUM test below.  This looks like a missing break —
	 verify intent before changing.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
3042
3043
3044 int
not_rc_reg(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3045 not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3046 {
3047 if (REG_P (op) && REGNO (op) == RC_REGNO)
3048 return 0;
3049 return 1;
3050 }
3051
3052
3053 /* Extended precision register R0-R1. */
3054
3055 int
r0r1_reg_operand(rtx op,enum machine_mode mode)3056 r0r1_reg_operand (rtx op, enum machine_mode mode)
3057 {
3058 if (! reg_operand (op, mode))
3059 return 0;
3060 if (GET_CODE (op) == SUBREG)
3061 op = SUBREG_REG (op);
3062 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
3063 }
3064
3065
3066 /* Extended precision register R2-R3. */
3067
3068 int
r2r3_reg_operand(rtx op,enum machine_mode mode)3069 r2r3_reg_operand (rtx op, enum machine_mode mode)
3070 {
3071 if (! reg_operand (op, mode))
3072 return 0;
3073 if (GET_CODE (op) == SUBREG)
3074 op = SUBREG_REG (op);
3075 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
3076 }
3077
3078
3079 /* Low extended precision register R0-R7. */
3080
3081 int
ext_low_reg_operand(rtx op,enum machine_mode mode)3082 ext_low_reg_operand (rtx op, enum machine_mode mode)
3083 {
3084 if (! reg_operand (op, mode))
3085 return 0;
3086 if (GET_CODE (op) == SUBREG)
3087 op = SUBREG_REG (op);
3088 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
3089 }
3090
3091
3092 /* Extended precision register. */
3093
3094 int
ext_reg_operand(rtx op,enum machine_mode mode)3095 ext_reg_operand (rtx op, enum machine_mode mode)
3096 {
3097 if (! reg_operand (op, mode))
3098 return 0;
3099 if (GET_CODE (op) == SUBREG)
3100 op = SUBREG_REG (op);
3101 if (! REG_P (op))
3102 return 0;
3103 return IS_EXT_OR_PSEUDO_REG (op);
3104 }
3105
3106
3107 /* Standard precision register. */
3108
3109 int
std_reg_operand(rtx op,enum machine_mode mode)3110 std_reg_operand (rtx op, enum machine_mode mode)
3111 {
3112 if (! reg_operand (op, mode))
3113 return 0;
3114 if (GET_CODE (op) == SUBREG)
3115 op = SUBREG_REG (op);
3116 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
3117 }
3118
3119 /* Standard precision or normal register. */
3120
3121 int
std_or_reg_operand(rtx op,enum machine_mode mode)3122 std_or_reg_operand (rtx op, enum machine_mode mode)
3123 {
3124 if (reload_in_progress)
3125 return std_reg_operand (op, mode);
3126 return reg_operand (op, mode);
3127 }
3128
3129 /* Address register. */
3130
3131 int
addr_reg_operand(rtx op,enum machine_mode mode)3132 addr_reg_operand (rtx op, enum machine_mode mode)
3133 {
3134 if (! reg_operand (op, mode))
3135 return 0;
3136 return c4x_a_register (op);
3137 }
3138
3139
3140 /* Index register. */
3141
3142 int
index_reg_operand(rtx op,enum machine_mode mode)3143 index_reg_operand (rtx op, enum machine_mode mode)
3144 {
3145 if (! reg_operand (op, mode))
3146 return 0;
3147 if (GET_CODE (op) == SUBREG)
3148 op = SUBREG_REG (op);
3149 return c4x_x_register (op);
3150 }
3151
3152
3153 /* DP register. */
3154
3155 int
dp_reg_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3156 dp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3157 {
3158 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
3159 }
3160
3161
3162 /* SP register. */
3163
3164 int
sp_reg_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3165 sp_reg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3166 {
3167 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
3168 }
3169
3170
3171 /* ST register. */
3172
3173 int
st_reg_operand(register rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3174 st_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3175 {
3176 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
3177 }
3178
3179
3180 /* RC register. */
3181
3182 int
rc_reg_operand(register rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3183 rc_reg_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3184 {
3185 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
3186 }
3187
3188
3189 int
call_address_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3190 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3191 {
3192 return (REG_P (op) || symbolic_address_operand (op, mode));
3193 }
3194
3195
3196 /* Symbolic address operand. */
3197
3198 int
symbolic_address_operand(register rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)3199 symbolic_address_operand (register rtx op,
3200 enum machine_mode mode ATTRIBUTE_UNUSED)
3201 {
3202 switch (GET_CODE (op))
3203 {
3204 case CONST:
3205 case SYMBOL_REF:
3206 case LABEL_REF:
3207 return 1;
3208 default:
3209 return 0;
3210 }
3211 }
3212
3213
3214 /* Check dst operand of a move instruction. */
3215
3216 int
dst_operand(rtx op,enum machine_mode mode)3217 dst_operand (rtx op, enum machine_mode mode)
3218 {
3219 if (GET_CODE (op) == SUBREG
3220 && mixed_subreg_operand (op, mode))
3221 return 0;
3222
3223 if (REG_P (op))
3224 return reg_operand (op, mode);
3225
3226 return nonimmediate_operand (op, mode);
3227 }
3228
3229
3230 /* Check src operand of two operand arithmetic instructions. */
3231
int
src_operand (rtx op, enum machine_mode mode)
{
  /* Reject QFmode subregs of integer registers; see
     mixed_subreg_operand.  */
  if (GET_CODE (op) == SUBREG
      && mixed_subreg_operand (op, mode))
    return 0;

  if (REG_P (op))
    return reg_operand (op, mode);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Integer constants must fit in a 16-bit signed immediate.  */
  if (GET_CODE (op) == CONST_INT)
    return (mode == QImode || mode == Pmode || mode == HImode)
      && c4x_I_constant (op);

  /* We don't like CONST_DOUBLE integers. */
  if (GET_CODE (op) == CONST_DOUBLE)
    return c4x_H_constant (op);

  /* Disallow symbolic addresses.  Only the predicate
     symbolic_address_operand will match these. */
  if (GET_CODE (op) == SYMBOL_REF
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == CONST)
    return 0;

  /* If TARGET_LOAD_DIRECT_MEMS is nonzero, disallow direct memory
     access to symbolic addresses.  These operands will get forced
     into a register and the movqi expander will generate a
     HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is nonzero. */
  if (GET_CODE (op) == MEM
      && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	   || GET_CODE (XEXP (op, 0)) == LABEL_REF
	   || GET_CODE (XEXP (op, 0)) == CONST)))
    return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;

  return general_operand (op, mode);
}
3272
3273
3274 int
src_hi_operand(rtx op,enum machine_mode mode)3275 src_hi_operand (rtx op, enum machine_mode mode)
3276 {
3277 if (c4x_O_constant (op))
3278 return 1;
3279 return src_operand (op, mode);
3280 }
3281
3282
3283 /* Check src operand of two operand logical instructions. */
3284
3285 int
lsrc_operand(rtx op,enum machine_mode mode)3286 lsrc_operand (rtx op, enum machine_mode mode)
3287 {
3288 if (mode == VOIDmode)
3289 mode = GET_MODE (op);
3290
3291 if (mode != QImode && mode != Pmode)
3292 fatal_insn ("mode not QImode", op);
3293
3294 if (GET_CODE (op) == CONST_INT)
3295 return c4x_L_constant (op) || c4x_J_constant (op);
3296
3297 return src_operand (op, mode);
3298 }
3299
3300
3301 /* Check src operand of two operand tricky instructions. */
3302
3303 int
tsrc_operand(rtx op,enum machine_mode mode)3304 tsrc_operand (rtx op, enum machine_mode mode)
3305 {
3306 if (mode == VOIDmode)
3307 mode = GET_MODE (op);
3308
3309 if (mode != QImode && mode != Pmode)
3310 fatal_insn ("mode not QImode", op);
3311
3312 if (GET_CODE (op) == CONST_INT)
3313 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3314
3315 return src_operand (op, mode);
3316 }
3317
3318
3319 /* Check src operand of two operand non immedidate instructions. */
3320
3321 int
nonimmediate_src_operand(rtx op,enum machine_mode mode)3322 nonimmediate_src_operand (rtx op, enum machine_mode mode)
3323 {
3324 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3325 return 0;
3326
3327 return src_operand (op, mode);
3328 }
3329
3330
3331 /* Check logical src operand of two operand non immedidate instructions. */
3332
3333 int
nonimmediate_lsrc_operand(rtx op,enum machine_mode mode)3334 nonimmediate_lsrc_operand (rtx op, enum machine_mode mode)
3335 {
3336 if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
3337 return 0;
3338
3339 return lsrc_operand (op, mode);
3340 }
3341
3342
3343 int
reg_or_const_operand(rtx op,enum machine_mode mode)3344 reg_or_const_operand (rtx op, enum machine_mode mode)
3345 {
3346 return reg_operand (op, mode) || const_operand (op, mode);
3347 }
3348
3349
3350 /* Check for indirect operands allowable in parallel instruction. */
3351
3352 int
par_ind_operand(rtx op,enum machine_mode mode)3353 par_ind_operand (rtx op, enum machine_mode mode)
3354 {
3355 if (mode != VOIDmode && mode != GET_MODE (op))
3356 return 0;
3357
3358 return c4x_S_indirect (op);
3359 }
3360
3361
3362 /* Check for operands allowable in parallel instruction. */
3363
3364 int
parallel_operand(rtx op,enum machine_mode mode)3365 parallel_operand (rtx op, enum machine_mode mode)
3366 {
3367 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3368 }
3369
3370
/* Decompose the address of the 'S'-class MEM operand OP into its
   BASE register number, an INCDEC flag (nonzero for any side-effect
   addressing mode), an INDEX register number (0 if none), and a
   constant DISP displacement.  Aborts on any address shape that the
   'S' constraint does not cover.  */

static void
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1;		/* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		/* Base + index addressing.  */
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		/* Base + small constant displacement.  */
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    /* Commuted index + base form.  */
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fallthrough.  */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
3472
3473
3474 int
c4x_address_conflict(rtx op0,rtx op1,int store0,int store1)3475 c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
3476 {
3477 int base0;
3478 int base1;
3479 int incdec0;
3480 int incdec1;
3481 int index0;
3482 int index1;
3483 int disp0;
3484 int disp1;
3485
3486 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3487 return 1;
3488
3489 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3490 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3491
3492 if (store0 && store1)
3493 {
3494 /* If we have two stores in parallel to the same address, then
3495 the C4x only executes one of the stores. This is unlikely to
3496 cause problems except when writing to a hardware device such
3497 as a FIFO since the second write will be lost. The user
3498 should flag the hardware location as being volatile so that
3499 we don't do this optimization. While it is unlikely that we
3500 have an aliased address if both locations are not marked
3501 volatile, it is probably safer to flag a potential conflict
3502 if either location is volatile. */
3503 if (! flag_argument_noalias)
3504 {
3505 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3506 return 1;
3507 }
3508 }
3509
3510 /* If have a parallel load and a store to the same address, the load
3511 is performed first, so there is no conflict. Similarly, there is
3512 no conflict if have parallel loads from the same address. */
3513
3514 /* Cannot use auto increment or auto decrement twice for same
3515 base register. */
3516 if (base0 == base1 && incdec0 && incdec0)
3517 return 1;
3518
3519 /* It might be too confusing for GCC if we have use a base register
3520 with a side effect and a memory reference using the same register
3521 in parallel. */
3522 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
3523 return 1;
3524
3525 /* We can not optimize the case where op1 and op2 refer to the same
3526 address. */
3527 if (base0 == base1 && disp0 == disp1 && index0 == index1)
3528 return 1;
3529
3530 /* No conflict. */
3531 return 0;
3532 }
3533
3534
3535 /* Check for while loop inside a decrement and branch loop. */
3536
3537 int
c4x_label_conflict(rtx insn,rtx jump,rtx db)3538 c4x_label_conflict (rtx insn, rtx jump, rtx db)
3539 {
3540 while (insn)
3541 {
3542 if (GET_CODE (insn) == CODE_LABEL)
3543 {
3544 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3545 return 1;
3546 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3547 return 0;
3548 }
3549 insn = PREV_INSN (insn);
3550 }
3551 return 1;
3552 }
3553
3554
3555 /* Validate combination of operands for parallel load/store instructions. */
3556
3557 int
valid_parallel_load_store(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3558 valid_parallel_load_store (rtx *operands,
3559 enum machine_mode mode ATTRIBUTE_UNUSED)
3560 {
3561 rtx op0 = operands[0];
3562 rtx op1 = operands[1];
3563 rtx op2 = operands[2];
3564 rtx op3 = operands[3];
3565
3566 if (GET_CODE (op0) == SUBREG)
3567 op0 = SUBREG_REG (op0);
3568 if (GET_CODE (op1) == SUBREG)
3569 op1 = SUBREG_REG (op1);
3570 if (GET_CODE (op2) == SUBREG)
3571 op2 = SUBREG_REG (op2);
3572 if (GET_CODE (op3) == SUBREG)
3573 op3 = SUBREG_REG (op3);
3574
3575 /* The patterns should only allow ext_low_reg_operand() or
3576 par_ind_operand() operands. Thus of the 4 operands, only 2
3577 should be REGs and the other 2 should be MEMs. */
3578
3579 /* This test prevents the multipack pass from using this pattern if
3580 op0 is used as an index or base register in op2 or op3, since
3581 this combination will require reloading. */
3582 if (GET_CODE (op0) == REG
3583 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3584 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
3585 return 0;
3586
3587 /* LDI||LDI. */
3588 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3589 return (REGNO (op0) != REGNO (op2))
3590 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
3591 && ! c4x_address_conflict (op1, op3, 0, 0);
3592
3593 /* STI||STI. */
3594 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3595 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
3596 && ! c4x_address_conflict (op0, op2, 1, 1);
3597
3598 /* LDI||STI. */
3599 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3600 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
3601 && ! c4x_address_conflict (op1, op2, 0, 1);
3602
3603 /* STI||LDI. */
3604 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3605 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
3606 && ! c4x_address_conflict (op0, op3, 1, 0);
3607
3608 return 0;
3609 }
3610
3611
3612 int
valid_parallel_operands_4(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3613 valid_parallel_operands_4 (rtx *operands,
3614 enum machine_mode mode ATTRIBUTE_UNUSED)
3615 {
3616 rtx op0 = operands[0];
3617 rtx op2 = operands[2];
3618
3619 if (GET_CODE (op0) == SUBREG)
3620 op0 = SUBREG_REG (op0);
3621 if (GET_CODE (op2) == SUBREG)
3622 op2 = SUBREG_REG (op2);
3623
3624 /* This test prevents the multipack pass from using this pattern if
3625 op0 is used as an index or base register in op2, since this combination
3626 will require reloading. */
3627 if (GET_CODE (op0) == REG
3628 && GET_CODE (op2) == MEM
3629 && reg_mentioned_p (op0, XEXP (op2, 0)))
3630 return 0;
3631
3632 return 1;
3633 }
3634
3635
3636 int
valid_parallel_operands_5(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3637 valid_parallel_operands_5 (rtx *operands,
3638 enum machine_mode mode ATTRIBUTE_UNUSED)
3639 {
3640 int regs = 0;
3641 rtx op0 = operands[0];
3642 rtx op1 = operands[1];
3643 rtx op2 = operands[2];
3644 rtx op3 = operands[3];
3645
3646 if (GET_CODE (op0) == SUBREG)
3647 op0 = SUBREG_REG (op0);
3648 if (GET_CODE (op1) == SUBREG)
3649 op1 = SUBREG_REG (op1);
3650 if (GET_CODE (op2) == SUBREG)
3651 op2 = SUBREG_REG (op2);
3652
3653 /* The patterns should only allow ext_low_reg_operand() or
3654 par_ind_operand() operands. Operands 1 and 2 may be commutative
3655 but only one of them can be a register. */
3656 if (GET_CODE (op1) == REG)
3657 regs++;
3658 if (GET_CODE (op2) == REG)
3659 regs++;
3660
3661 if (regs != 1)
3662 return 0;
3663
3664 /* This test prevents the multipack pass from using this pattern if
3665 op0 is used as an index or base register in op3, since this combination
3666 will require reloading. */
3667 if (GET_CODE (op0) == REG
3668 && GET_CODE (op3) == MEM
3669 && reg_mentioned_p (op0, XEXP (op3, 0)))
3670 return 0;
3671
3672 return 1;
3673 }
3674
3675
3676 int
valid_parallel_operands_6(rtx * operands,enum machine_mode mode ATTRIBUTE_UNUSED)3677 valid_parallel_operands_6 (rtx *operands,
3678 enum machine_mode mode ATTRIBUTE_UNUSED)
3679 {
3680 int regs = 0;
3681 rtx op0 = operands[0];
3682 rtx op1 = operands[1];
3683 rtx op2 = operands[2];
3684 rtx op4 = operands[4];
3685 rtx op5 = operands[5];
3686
3687 if (GET_CODE (op1) == SUBREG)
3688 op1 = SUBREG_REG (op1);
3689 if (GET_CODE (op2) == SUBREG)
3690 op2 = SUBREG_REG (op2);
3691 if (GET_CODE (op4) == SUBREG)
3692 op4 = SUBREG_REG (op4);
3693 if (GET_CODE (op5) == SUBREG)
3694 op5 = SUBREG_REG (op5);
3695
3696 /* The patterns should only allow ext_low_reg_operand() or
3697 par_ind_operand() operands. Thus of the 4 input operands, only 2
3698 should be REGs and the other 2 should be MEMs. */
3699
3700 if (GET_CODE (op1) == REG)
3701 regs++;
3702 if (GET_CODE (op2) == REG)
3703 regs++;
3704 if (GET_CODE (op4) == REG)
3705 regs++;
3706 if (GET_CODE (op5) == REG)
3707 regs++;
3708
3709 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3710 Perhaps we should count the MEMs as well? */
3711 if (regs != 2)
3712 return 0;
3713
3714 /* This test prevents the multipack pass from using this pattern if
3715 op0 is used as an index or base register in op4 or op5, since
3716 this combination will require reloading. */
3717 if (GET_CODE (op0) == REG
3718 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3719 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3720 return 0;
3721
3722 return 1;
3723 }
3724
3725
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.
   Returns nonzero when the operand combination is acceptable.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int force)
{
  rtx op1;
  rtx op2;
  enum rtx_code code1;
  enum rtx_code code2;

  /* COMPARE has only two operands; all other codes have the two
     source operands in slots 1 and 2 with the destination in slot 0.  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  /* Look through SUBREGs to classify the underlying operands.  */
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);

  /* Two registers are always OK.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory operands are allowed only with restricted addressing
     modes (both S-indirect or both R-indirect).  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
	return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* Any other same-code combination (e.g. two constants) is invalid.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
	{
	case CONST_INT:
	  /* NOTE(review): op1 is a REG in this branch, so the
	     c4x_R_indirect (op1) test looks as though it can never be
	     true here -- confirm against the original intent.  */
	  if (c4x_J_constant (op2) && c4x_R_indirect (op1))
	    return 1;
	  break;

	case CONST_DOUBLE:
	  if (! c4x_H_constant (op2))
	    return 0;
	  break;

	  /* Any valid memory operand screened by src_operand is OK.  */
	case MEM:

	  /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	     into a stack slot memory address comprising a PLUS and a
	     constant.  */
	case ADDRESSOF:
	  break;

	default:
	  fatal_insn ("c4x_valid_operands: Internal error", op2);
	  break;
	}

      /* Check that we have a valid destination register for a two operand
	 instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
    }

  /* We assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));

  switch (code1)
    {
    case CONST_INT:
      if (c4x_J_constant (op1) && c4x_R_indirect (op2))
	return 1;
      break;

    case CONST_DOUBLE:
      if (! c4x_H_constant (op1))
	return 0;
      break;

      /* Any valid memory operand screened by src_operand is OK.  */
    case MEM:
#if 0
      if (code2 != REG)
	return 0;
#endif
      break;

      /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
	 into a stack slot memory address comprising a PLUS and a
	 constant.  */
    case ADDRESSOF:
      break;

    default:
      abort ();
      break;
    }

  /* Check that we have a valid destination register for a two operand
     instruction.  NOTE(review): op1 is not a REG on this path, so
     REGNO (op1) is only meaningful when force is zero short-circuits;
     confirm force is never set for these operand combinations.  */
  return ! force || REGNO (op1) == REGNO (operands[0]);
}
3850
3851
valid_operands(enum rtx_code code,rtx * operands,enum machine_mode mode)3852 int valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
3853 {
3854
3855 /* If we are not optimizing then we have to let anything go and let
3856 reload fix things up. instantiate_decl in function.c can produce
3857 invalid insns by changing the offset of a memory operand from a
3858 valid one into an invalid one, when the second operand is also a
3859 memory operand. The alternative is not to allow two memory
3860 operands for an insn when not optimizing. The problem only rarely
3861 occurs, for example with the C-torture program DFcmp.c. */
3862
3863 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
3864 }
3865
3866
/* Massage OPERANDS for an insn with rtx code CODE and mode MODE into
   a combination the machine description can match, forcing values
   into registers where needed.  Always returns 1.  */

int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
	 they can get hoisted out of loops.  This will tie up an extra
	 register but can save an extra cycle.  Only do this if loop
	 optimization enabled.  (We cannot pull this trick for add and
	 sub instructions since the flow pass won't find
	 autoincrements etc.)  This allows us to generate compare
	 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
	 of LDI *AR0++, R0; CMPI 42, R0.

	 Note that expand_binops will try to load an expensive constant
	 into a register if it is used within a loop.  Unfortunately,
	 the cost mechanism doesn't allow us to look at the other
	 operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
	  && TARGET_HOIST
	  && optimize > 0
	  && GET_CODE (operands[1]) == CONST_INT
	  && preserve_subexpressions_p ()
	  && rtx_cost (operands[1], code) > 1)
	operands[1] = force_reg (mode, operands[1]);

      /* If the operand pair is still invalid, force operand 0 into a
	 register as the fallback.  */
      if (! reload_in_progress
	  && ! c4x_valid_operands (code, operands, mode, 0))
	operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && preserve_subexpressions_p ()
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register, but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand 1 into
	 a register, preventing reload from having to do this at a
	 later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
	{
	  /* Copy through the destination so that a 2-operand insn
	     (destination == source 1) becomes valid.  */
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = copy_rtx (operands[0]);
	}
      else
	{
	  /* Just in case...  */
	  if (! c4x_valid_operands (code, operands, mode, 0))
	    operands[2] = force_reg (mode, operands[2]);
	}
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));

  return 1;
}
3951
3952
3953 /* The following predicates are used for instruction scheduling. */
3954
3955 int
group1_reg_operand(rtx op,enum machine_mode mode)3956 group1_reg_operand (rtx op, enum machine_mode mode)
3957 {
3958 if (mode != VOIDmode && mode != GET_MODE (op))
3959 return 0;
3960 if (GET_CODE (op) == SUBREG)
3961 op = SUBREG_REG (op);
3962 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
3963 }
3964
3965
3966 int
group1_mem_operand(rtx op,enum machine_mode mode)3967 group1_mem_operand (rtx op, enum machine_mode mode)
3968 {
3969 if (mode != VOIDmode && mode != GET_MODE (op))
3970 return 0;
3971
3972 if (GET_CODE (op) == MEM)
3973 {
3974 op = XEXP (op, 0);
3975 if (GET_CODE (op) == PLUS)
3976 {
3977 rtx op0 = XEXP (op, 0);
3978 rtx op1 = XEXP (op, 1);
3979
3980 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
3981 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
3982 return 1;
3983 }
3984 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
3985 return 1;
3986 }
3987
3988 return 0;
3989 }
3990
3991
3992 /* Return true if any one of the address registers. */
3993
3994 int
arx_reg_operand(rtx op,enum machine_mode mode)3995 arx_reg_operand (rtx op, enum machine_mode mode)
3996 {
3997 if (mode != VOIDmode && mode != GET_MODE (op))
3998 return 0;
3999 if (GET_CODE (op) == SUBREG)
4000 op = SUBREG_REG (op);
4001 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
4002 }
4003
4004
4005 static int
c4x_arn_reg_operand(rtx op,enum machine_mode mode,unsigned int regno)4006 c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
4007 {
4008 if (mode != VOIDmode && mode != GET_MODE (op))
4009 return 0;
4010 if (GET_CODE (op) == SUBREG)
4011 op = SUBREG_REG (op);
4012 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
4013 }
4014
4015
/* Helper: return nonzero if OP is a memory reference whose address
   involves hard register REGNO (any register before reload), whether
   directly, via auto increment/decrement, via PRE/POST_MODIFY, or as
   one term of a PLUS.  */

static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
	{
	case PRE_DEC:
	case POST_DEC:
	case PRE_INC:
	case POST_INC:
	  /* Strip the side-effect wrapper and check the inner register.  */
	  op = XEXP (op, 0);

	  /* Fall through.  */
	case REG:
	  return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

	case PRE_MODIFY:
	case POST_MODIFY:
	  /* Match either the modified register or the index register
	     inside the modification expression.  */
	  if (REG_P (XEXP (op, 0)) && (! reload_completed
				       || (REGNO (XEXP (op, 0)) == regno)))
	    return 1;
	  if (REG_P (XEXP (XEXP (op, 1), 1))
	      && (! reload_completed
		  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
	    return 1;
	  break;

	case PLUS:
	  {
	    rtx op0 = XEXP (op, 0);
	    rtx op1 = XEXP (op, 1);

	    if ((REG_P (op0) && (! reload_completed
				 || (REGNO (op0) == regno)))
		|| (REG_P (op1) && (! reload_completed
				    || (REGNO (op1) == regno))))
	      return 1;
	  }
	  break;

	default:
	  break;
	}
    }
  return 0;
}
4066
4067
/* Return nonzero if OP is the AR0 register (any register before reload).  */

int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
4073
4074
/* Return nonzero if OP is a memory reference using the AR0 register.  */

int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
4080
4081
/* Return nonzero if OP is the AR1 register (any register before reload).  */

int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
4087
4088
/* Return nonzero if OP is a memory reference using the AR1 register.  */

int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
4094
4095
/* Return nonzero if OP is the AR2 register (any register before reload).  */

int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
4101
4102
/* Return nonzero if OP is a memory reference using the AR2 register.  */

int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
4108
4109
/* Return nonzero if OP is the AR3 register (any register before reload).  */

int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
4115
4116
/* Return nonzero if OP is a memory reference using the AR3 register.  */

int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
4122
4123
/* Return nonzero if OP is the AR4 register (any register before reload).  */

int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
4129
4130
/* Return nonzero if OP is a memory reference using the AR4 register.  */

int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
4136
4137
/* Return nonzero if OP is the AR5 register (any register before reload).  */

int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
4143
4144
/* Return nonzero if OP is a memory reference using the AR5 register.  */

int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
4150
4151
/* Return nonzero if OP is the AR6 register (any register before reload).  */

int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
4157
4158
/* Return nonzero if OP is a memory reference using the AR6 register.  */

int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
4164
4165
/* Return nonzero if OP is the AR7 register (any register before reload).  */

int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
4171
4172
/* Return nonzero if OP is a memory reference using the AR7 register.  */

int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
4178
4179
/* Return nonzero if OP is the IR0 register (any register before reload).  */

int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
4185
4186
/* Return nonzero if OP is a memory reference using the IR0 register.  */

int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
4192
4193
/* Return nonzero if OP is the IR1 register (any register before reload).  */

int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
4199
4200
/* Return nonzero if OP is a memory reference using the IR1 register.  */

int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
4206
4207
4208 /* This is similar to operand_subword but allows autoincrement
4209 addressing. */
4210
4211 rtx
c4x_operand_subword(rtx op,int i,int validate_address,enum machine_mode mode)4212 c4x_operand_subword (rtx op, int i, int validate_address,
4213 enum machine_mode mode)
4214 {
4215 if (mode != HImode && mode != HFmode)
4216 fatal_insn ("c4x_operand_subword: invalid mode", op);
4217
4218 if (mode == HFmode && REG_P (op))
4219 fatal_insn ("c4x_operand_subword: invalid operand", op);
4220
4221 if (GET_CODE (op) == MEM)
4222 {
4223 enum rtx_code code = GET_CODE (XEXP (op, 0));
4224 enum machine_mode mode = GET_MODE (XEXP (op, 0));
4225 enum machine_mode submode;
4226
4227 submode = mode;
4228 if (mode == HImode)
4229 submode = QImode;
4230 else if (mode == HFmode)
4231 submode = QFmode;
4232
4233 switch (code)
4234 {
4235 case POST_INC:
4236 case PRE_INC:
4237 return gen_rtx_MEM (submode, XEXP (op, 0));
4238
4239 case POST_DEC:
4240 case PRE_DEC:
4241 case PRE_MODIFY:
4242 case POST_MODIFY:
4243 /* We could handle these with some difficulty.
4244 e.g., *p-- => *(p-=2); *(p+1). */
4245 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4246
4247 case SYMBOL_REF:
4248 case LABEL_REF:
4249 case CONST:
4250 case CONST_INT:
4251 fatal_insn ("c4x_operand_subword: invalid address", op);
4252
4253 /* Even though offsettable_address_p considers (MEM
4254 (LO_SUM)) to be offsettable, it is not safe if the
4255 address is at the end of the data page since we also have
4256 to fix up the associated high PART. In this case where
4257 we are trying to split a HImode or HFmode memory
4258 reference, we would have to emit another insn to reload a
4259 new HIGH value. It's easier to disable LO_SUM memory references
4260 in HImode or HFmode and we probably get better code. */
4261 case LO_SUM:
4262 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4263
4264 default:
4265 break;
4266 }
4267 }
4268
4269 return operand_subword (op, i, validate_address, mode);
4270 }
4271
/* Singly-linked list node recording a symbol name for the assembler
   output lists below.  */
struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Names declared global, and names referenced but not defined here.  */
static struct name_list *global_head;
static struct name_list *extern_head;
4280
4281
4282 /* Add NAME to list of global symbols and remove from external list if
4283 present on external list. */
4284
4285 void
c4x_global_label(const char * name)4286 c4x_global_label (const char *name)
4287 {
4288 struct name_list *p, *last;
4289
4290 /* Do not insert duplicate names, so linearly search through list of
4291 existing names. */
4292 p = global_head;
4293 while (p)
4294 {
4295 if (strcmp (p->name, name) == 0)
4296 return;
4297 p = p->next;
4298 }
4299 p = (struct name_list *) xmalloc (sizeof *p);
4300 p->next = global_head;
4301 p->name = name;
4302 global_head = p;
4303
4304 /* Remove this name from ref list if present. */
4305 last = NULL;
4306 p = extern_head;
4307 while (p)
4308 {
4309 if (strcmp (p->name, name) == 0)
4310 {
4311 if (last)
4312 last->next = p->next;
4313 else
4314 extern_head = p->next;
4315 break;
4316 }
4317 last = p;
4318 p = p->next;
4319 }
4320 }
4321
4322
4323 /* Add NAME to list of external symbols. */
4324
4325 void
c4x_external_ref(const char * name)4326 c4x_external_ref (const char *name)
4327 {
4328 struct name_list *p;
4329
4330 /* Do not insert duplicate names. */
4331 p = extern_head;
4332 while (p)
4333 {
4334 if (strcmp (p->name, name) == 0)
4335 return;
4336 p = p->next;
4337 }
4338
4339 /* Do not insert ref if global found. */
4340 p = global_head;
4341 while (p)
4342 {
4343 if (strcmp (p->name, name) == 0)
4344 return;
4345 p = p->next;
4346 }
4347 p = (struct name_list *) xmalloc (sizeof *p);
4348 p->next = extern_head;
4349 p->name = name;
4350 extern_head = p;
4351 }
4352
4353 /* We need to have a data section we can identify so that we can set
4354 the DP register back to a data pointer in the small memory model.
4355 This is only required for ISRs if we are paranoid that someone
4356 may have quietly changed this register on the sly. */
4357 static void
c4x_file_start(void)4358 c4x_file_start (void)
4359 {
4360 int dspversion = 0;
4361 if (TARGET_C30) dspversion = 30;
4362 if (TARGET_C31) dspversion = 31;
4363 if (TARGET_C32) dspversion = 32;
4364 if (TARGET_C33) dspversion = 33;
4365 if (TARGET_C40) dspversion = 40;
4366 if (TARGET_C44) dspversion = 44;
4367
4368 default_file_start ();
4369 fprintf (asm_out_file, "\t.version\t%d\n", dspversion);
4370 fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
4371 }
4372
4373
4374 static void
c4x_file_end(void)4375 c4x_file_end (void)
4376 {
4377 struct name_list *p;
4378
4379 /* Output all external names that are not global. */
4380 p = extern_head;
4381 while (p)
4382 {
4383 fprintf (asm_out_file, "\t.ref\t");
4384 assemble_name (asm_out_file, p->name);
4385 fprintf (asm_out_file, "\n");
4386 p = p->next;
4387 }
4388 fprintf (asm_out_file, "\t.end\n");
4389 }
4390
4391
/* If DECL's name appears on LIST (a TREE_LIST keyed by identifier),
   prepend attribute ATTRIB with the matching value onto *ATTRIBUTES.
   Identifier nodes are shared, so pointer comparison of the
   IDENTIFIER_POINTERs suffices for name equality.  */

static void
c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
{
  while (list != NULL_TREE
	 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
	 != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
			     *attributes);
}
4403
4404
4405 static void
c4x_insert_attributes(tree decl,tree * attributes)4406 c4x_insert_attributes (tree decl, tree *attributes)
4407 {
4408 switch (TREE_CODE (decl))
4409 {
4410 case FUNCTION_DECL:
4411 c4x_check_attribute ("section", code_tree, decl, attributes);
4412 c4x_check_attribute ("const", pure_tree, decl, attributes);
4413 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4414 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4415 c4x_check_attribute ("naked", naked_tree, decl, attributes);
4416 break;
4417
4418 case VAR_DECL:
4419 c4x_check_attribute ("section", data_tree, decl, attributes);
4420 break;
4421
4422 default:
4423 break;
4424 }
4425 }
4426
/* Table of valid machine attributes.  All three apply to function
   types and share the FUNCTION_TYPE-checking handler below.  */
const struct attribute_spec c4x_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "naked",    0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { "leaf_pretend", 0, 0, false, true,  true,  c4x_handle_fntype_attribute },
  { NULL,           0, 0, false, false, false, NULL }
};
4436
4437 /* Handle an attribute requiring a FUNCTION_TYPE;
4438 arguments as in struct attribute_spec.handler. */
4439 static tree
c4x_handle_fntype_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)4440 c4x_handle_fntype_attribute (tree *node, tree name,
4441 tree args ATTRIBUTE_UNUSED,
4442 int flags ATTRIBUTE_UNUSED,
4443 bool *no_add_attrs)
4444 {
4445 if (TREE_CODE (*node) != FUNCTION_TYPE)
4446 {
4447 warning ("`%s' attribute only applies to functions",
4448 IDENTIFIER_POINTER (name));
4449 *no_add_attrs = true;
4450 }
4451
4452 return NULL_TREE;
4453 }
4454
4455
/* !!! FIXME to emit RPTS correctly.  */

/* Return nonzero if the repeat block starting after INSN (an RPTB
   insn) qualifies to be emitted as an RPTS single-instruction repeat:
   the block must consist of exactly one insn, and either TARGET_RPTS
   is set or the iteration count OP passes TARGET_RPTS_CYCLES.  */

int
c4x_rptb_rpts_p (rtx insn, rtx op)
{
  /* The next insn should be our label marking where the
     repeat block starts.  */
  insn = NEXT_INSN (insn);
  if (GET_CODE (insn) != CODE_LABEL)
    {
      /* Some insns may have been shifted between the RPTB insn
	 and the top label... They were probably destined to
	 be moved out of the loop.  For now, let's leave them
	 where they are and print a warning.  We should
	 probably move these insns before the repeat block insn.  */
      if (TARGET_DEBUG)
	fatal_insn("c4x_rptb_rpts_p: Repeat block top label moved\n",
		   insn);
      return 0;
    }

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* This should be our first insn in the loop.  */
  if (! INSN_P (insn))
    return 0;

  /* Skip any notes.  */
  insn = next_nonnote_insn (insn);

  /* The second insn must already be the loop-end marker: RPTS can
     only repeat a single instruction.  */
  if (! INSN_P (insn))
    return 0;

  if (recog_memoized (insn) != CODE_FOR_rptb_end)
    return 0;

  /* With -mrpts any single-insn loop qualifies; otherwise only loops
     whose constant cycle count passes the TARGET_RPTS_CYCLES limit.  */
  if (TARGET_RPTS)
    return 1;

  return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
}
4498
4499
/* Check if register r11 is used as the destination of an insn.
   X may be an insn or an arbitrary rtx; the expression is walked
   recursively.  Returns nonzero if a SET destination (or any
   sub-expression) is register r11.  */

static int
c4x_r11_set_p(rtx x)
{
  rtx set;
  int i, j;
  const char *fmt;

  if (x == 0)
    return 0;

  /* For a delay-slot SEQUENCE, only the last insn in the sequence is
     relevant (it is the one executed immediately before the laj).  */
  if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
    x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);

  /* For a simple single-set insn, examine its destination.  */
  if (INSN_P (x) && (set = single_set (x)))
    x = SET_DEST (set);

  if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
    return 1;

  /* Otherwise walk every sub-rtx of X looking for r11.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (c4x_r11_set_p (XEXP (x, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (c4x_r11_set_p (XVECEXP (x, i, j)))
	    return 1;
    }
  return 0;
}
4536
4537
4538 /* The c4x sometimes has a problem when the insn before the laj insn
4539 sets the r11 register. Check for this situation. */
4540
4541 int
c4x_check_laj_p(rtx insn)4542 c4x_check_laj_p (rtx insn)
4543 {
4544 insn = prev_nonnote_insn (insn);
4545
4546 /* If this is the start of the function no nop is needed. */
4547 if (insn == 0)
4548 return 0;
4549
4550 /* If the previous insn is a code label we have to insert a nop. This
4551 could be a jump or table jump. We can find the normal jumps by
4552 scanning the function but this will not find table jumps. */
4553 if (GET_CODE (insn) == CODE_LABEL)
4554 return 1;
4555
4556 /* If the previous insn sets register r11 we have to insert a nop. */
4557 if (c4x_r11_set_p (insn))
4558 return 1;
4559
4560 /* No nop needed. */
4561 return 0;
4562 }
4563
4564
4565 /* Adjust the cost of a scheduling dependency. Return the new cost of
4566 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4567 A set of an address register followed by a use occurs a 2 cycle
4568 stall (reduced to a single cycle on the c40 using LDA), while
4569 a read of an address register followed by a use occurs a single cycle. */
4570
4571 #define SET_USE_COST 3
4572 #define SETLDA_USE_COST 2
4573 #define READ_USE_COST 2
4574
4575 static int
c4x_adjust_cost(rtx insn,rtx link,rtx dep_insn,int cost)4576 c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4577 {
4578 /* Don't worry about this until we know what registers have been
4579 assigned. */
4580 if (flag_schedule_insns == 0 && ! reload_completed)
4581 return 0;
4582
4583 /* How do we handle dependencies where a read followed by another
4584 read causes a pipeline stall? For example, a read of ar0 followed
4585 by the use of ar0 for a memory reference. It looks like we
4586 need to extend the scheduler to handle this case. */
4587
4588 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4589 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4590 so only deal with insns we know about. */
4591 if (recog_memoized (dep_insn) < 0)
4592 return 0;
4593
4594 if (REG_NOTE_KIND (link) == 0)
4595 {
4596 int max = 0;
4597
4598 /* Data dependency; DEP_INSN writes a register that INSN reads some
4599 cycles later. */
4600 if (TARGET_C3X)
4601 {
4602 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4603 max = SET_USE_COST > max ? SET_USE_COST : max;
4604 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4605 max = READ_USE_COST > max ? READ_USE_COST : max;
4606 }
4607 else
4608 {
4609 /* This could be significantly optimized. We should look
4610 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4611 insn uses ar0-ar7. We then test if the same register
4612 is used. The tricky bit is that some operands will
4613 use several registers... */
4614 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4615 max = SET_USE_COST > max ? SET_USE_COST : max;
4616 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4617 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4618 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4619 max = READ_USE_COST > max ? READ_USE_COST : max;
4620
4621 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4622 max = SET_USE_COST > max ? SET_USE_COST : max;
4623 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4624 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4625 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4626 max = READ_USE_COST > max ? READ_USE_COST : max;
4627
4628 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4629 max = SET_USE_COST > max ? SET_USE_COST : max;
4630 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4631 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4632 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4633 max = READ_USE_COST > max ? READ_USE_COST : max;
4634
4635 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4636 max = SET_USE_COST > max ? SET_USE_COST : max;
4637 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4638 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4639 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4640 max = READ_USE_COST > max ? READ_USE_COST : max;
4641
4642 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4643 max = SET_USE_COST > max ? SET_USE_COST : max;
4644 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4645 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4646 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4647 max = READ_USE_COST > max ? READ_USE_COST : max;
4648
4649 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4650 max = SET_USE_COST > max ? SET_USE_COST : max;
4651 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
4652 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4653 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
4654 max = READ_USE_COST > max ? READ_USE_COST : max;
4655
4656 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
4657 max = SET_USE_COST > max ? SET_USE_COST : max;
4658 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
4659 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4660 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
4661 max = READ_USE_COST > max ? READ_USE_COST : max;
4662
4663 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
4664 max = SET_USE_COST > max ? SET_USE_COST : max;
4665 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
4666 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4667 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
4668 max = READ_USE_COST > max ? READ_USE_COST : max;
4669
4670 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
4671 max = SET_USE_COST > max ? SET_USE_COST : max;
4672 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
4673 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4674
4675 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
4676 max = SET_USE_COST > max ? SET_USE_COST : max;
4677 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
4678 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4679 }
4680
4681 if (max)
4682 cost = max;
4683
4684 /* For other data dependencies, the default cost specified in the
4685 md is correct. */
4686 return cost;
4687 }
4688 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
4689 {
4690 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4691 cycles later. */
4692
4693 /* For c4x anti dependencies, the cost is 0. */
4694 return 0;
4695 }
4696 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
4697 {
4698 /* Output dependency; DEP_INSN writes a register that INSN writes some
4699 cycles later. */
4700
4701 /* For c4x output dependencies, the cost is 0. */
4702 return 0;
4703 }
4704 else
4705 abort ();
4706 }
4707
4708 void
c4x_init_builtins(void)4709 c4x_init_builtins (void)
4710 {
4711 tree endlink = void_list_node;
4712
4713 builtin_function ("fast_ftoi",
4714 build_function_type
4715 (integer_type_node,
4716 tree_cons (NULL_TREE, double_type_node, endlink)),
4717 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
4718 builtin_function ("ansi_ftoi",
4719 build_function_type
4720 (integer_type_node,
4721 tree_cons (NULL_TREE, double_type_node, endlink)),
4722 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL, NULL_TREE);
4723 if (TARGET_C3X)
4724 builtin_function ("fast_imult",
4725 build_function_type
4726 (integer_type_node,
4727 tree_cons (NULL_TREE, integer_type_node,
4728 tree_cons (NULL_TREE,
4729 integer_type_node, endlink))),
4730 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL, NULL_TREE);
4731 else
4732 {
4733 builtin_function ("toieee",
4734 build_function_type
4735 (double_type_node,
4736 tree_cons (NULL_TREE, double_type_node, endlink)),
4737 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4738 builtin_function ("frieee",
4739 build_function_type
4740 (double_type_node,
4741 tree_cons (NULL_TREE, double_type_node, endlink)),
4742 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL, NULL_TREE);
4743 builtin_function ("fast_invf",
4744 build_function_type
4745 (double_type_node,
4746 tree_cons (NULL_TREE, double_type_node, endlink)),
4747 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL, NULL_TREE);
4748 }
4749 }
4750
4751
/* Expand a call EXP to one of the machine-specific builtins registered
   by c4x_init_builtins.  TARGET is a suggested place for the result
   (may be 0 or unsuitable, in which case a fresh pseudo is used);
   SUBTARGET, MODE and IGNORE are unused.  Return the rtx holding the
   result, or NULL_RTX when the builtin is not available on the
   selected target variant.  */

rtx
c4x_expand_builtin (tree exp, rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  /* The FUNCTION_DECL being called and the machine-specific code it
     was registered with.  */
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  rtx r0, r1;

  switch (fcode)
    {
    case C4X_BUILTIN_FIX:
      /* fast_ftoi: expand the argument in QFmode and emit the
	 non-ANSI fix pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fixqfqi_clobber (target, r0));
      return target;

    case C4X_BUILTIN_FIX_ANSI:
      /* ansi_ftoi: same as above but using the ANSI-rounding
	 truncation pattern.  */
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_fix_truncqfqi2 (target, r0));
      return target;

    case C4X_BUILTIN_MPYI:
      /* fast_imult: 24-bit integer multiply; only registered (and
	 only expanded) for the c3x.  */
      if (! TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
      r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
      r0 = protect_from_queue (r0, 0);
      r1 = protect_from_queue (r1, 0);
      if (! target || ! register_operand (target, QImode))
	target = gen_reg_rtx (QImode);
      emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
      return target;

    case C4X_BUILTIN_TOIEEE:
      /* toieee: c4x-only conversion to IEEE format.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_toieee (target, r0));
      return target;

    case C4X_BUILTIN_FRIEEE:
      /* frieee: c4x-only conversion from IEEE format.  The operand is
	 forced into a stack slot first; presumably the frieee pattern
	 requires a memory operand -- confirm against the md.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
	put_var_into_stack (arg0, /*rescan=*/true);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (register_operand (r0, QFmode))
	{
	  /* Spill a register operand to a fresh stack slot.  */
	  r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
	  emit_move_insn (r1, r0);
	  r0 = r1;
	}
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_frieee (target, r0));
      return target;

    case C4X_BUILTIN_RCPF:
      /* fast_invf: c4x-only fast reciprocal approximation.  */
      if (TARGET_C3X)
	break;
      arg0 = TREE_VALUE (arglist);
      r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
      r0 = protect_from_queue (r0, 0);
      if (! target || ! register_operand (target, QFmode))
	target = gen_reg_rtx (QFmode);
      emit_insn (gen_rcpfqf_clobber (target, r0));
      return target;
    }
  /* Unknown code or builtin not supported on this target variant.  */
  return NULL_RTX;
}
4841
/* Register the c4x-specific names of the libgcc arithmetic support
   routines, and initialize the extra libfunc handles declared at the
   top of this file (used for operations with no optab slot).  */

static void
c4x_init_libfuncs (void)
{
  /* QImode (32-bit int on this target) integer arithmetic.  */
  set_optab_libfunc (smul_optab, QImode, "__mulqi3");
  set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
  set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
  set_optab_libfunc (smod_optab, QImode, "__modqi3");
  set_optab_libfunc (umod_optab, QImode, "__umodqi3");
  /* Single- and extended-precision float arithmetic.  */
  set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
  set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
  set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
  /* HImode (double-word integer) arithmetic.  */
  set_optab_libfunc (smul_optab, HImode, "__mulhi3");
  set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
  set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
  set_optab_libfunc (smod_optab, HImode, "__modhi3");
  set_optab_libfunc (umod_optab, HImode, "__umodhi3");
  set_optab_libfunc (ffs_optab,  QImode, "__ffs");
  /* Helpers with no optab; the handles are file-scope globals used by
     expanders elsewhere in this file.  */
  smulhi3_libfunc           = init_one_libfunc ("__smulhi3_high");
  umulhi3_libfunc           = init_one_libfunc ("__umulhi3_high");
  fix_truncqfhi2_libfunc    = init_one_libfunc ("__fix_truncqfhi2");
  fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
  fix_trunchfhi2_libfunc    = init_one_libfunc ("__fix_trunchfhi2");
  fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
  floathiqf2_libfunc        = init_one_libfunc ("__floathiqf2");
  floatunshiqf2_libfunc     = init_one_libfunc ("__ufloathiqf2");
  floathihf2_libfunc        = init_one_libfunc ("__floathihf2");
  floatunshihf2_libfunc     = init_one_libfunc ("__ufloathihf2");
}
4870
4871 static void
c4x_asm_named_section(const char * name,unsigned int flags ATTRIBUTE_UNUSED)4872 c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED)
4873 {
4874 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
4875 }
4876
/* Make label NAME globally visible on STREAM: emit the generic
   .globl-style directive, then record NAME with c4x_global_label
   (defined elsewhere in this file).  The call order matters: the
   directive is written before the bookkeeping runs.  */

static void
c4x_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  c4x_global_label (name);
}
4883
4884 #define SHIFT_CODE_P(C) \
4885 ((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
4886 #define LOGICAL_CODE_P(C) \
4887 ((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
4888
4889 /* Compute a (partial) cost for rtx X. Return true if the complete
4890 cost has been computed, and false if subexpressions should be
4891 scanned. In either case, *TOTAL contains the cost result. */
4892
/* Compute a (partial) cost for rtx X, whose top code is CODE and whose
   parent's code is OUTER_CODE.  Return true if the complete cost has
   been computed, and false if subexpressions should be scanned.  In
   either case, *TOTAL contains the cost result.  */

static bool
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  HOST_WIDE_INT val;

  switch (code)
    {
      /* Some small integers are effectively free for the C40.  We should
	 also consider if we are using the small memory model.  With
	 the big memory model we require an extra insn for a constant
	 loaded from memory.  */

    case CONST_INT:
      val = INTVAL (x);
      /* NOTE: the branch order below matters -- the cheapest
	 classifications are tried first.  */
      if (c4x_J_constant (x))
	*total = 0;
      /* On the c4x, AND with 255 or 65535 maps to a dedicated
	 byte/halfword operation, so the constant itself is free.  */
      else if (! TARGET_C3X
	       && outer_code == AND
	       && (val == 255 || val == 65535))
	*total = 0;
      /* Likewise right shifts by 16 or 24 on the c4x.  */
      else if (! TARGET_C3X
	       && (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
	       && (val == 16 || val == 24))
	*total = 0;
      else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
	*total = 3;
      /* Logical ops use the wider L constraint range; everything else
	 uses the I range.  */
      else if (LOGICAL_CODE_P (outer_code)
	       ? c4x_L_constant (x) : c4x_I_constant (x))
	*total = 2;
      else
	*total = 4;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Symbolic constants need a load from memory (big model).  */
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if (c4x_H_constant (x))
	*total = 2;
      else if (GET_MODE (x) == QFmode)
	*total = 4;
      else
	*total = 8;
      return true;

      /* ??? Note that we return true, rather than false so that rtx_cost
	 doesn't include the constant costs.  Otherwise expand_mult will
	 think that it is cheaper to synthesize a multiply rather than to
	 use a multiply instruction.  I think this is because the algorithm
	 synth_mult doesn't take into account the loading of the operands,
	 whereas the calculation of mult_cost does.  */
    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* Integer multiply is expensive without the MPYI instruction;
	 float multiply and MPYI-capable targets take one insn.  */
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      || TARGET_MPYI ? 1 : 14);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* Division is always a library call / synthesized sequence.  */
      *total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
			      ? 15 : 50);
      return true;

    default:
      /* Let the caller recurse into subexpressions.  */
      return false;
    }
}
4975