/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2018 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* To Do:

 * Re-enable memory-to-memory copies and fix up reload.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "toplev.h"
#include "langhooks.h"
#include "opts.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;

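/* Return the register reserved as the base register for small data
   area accesses.  It is an error to use this before a register has
   been chosen by rx_conditional_register_usage.  */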
static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}

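/* Likewise for the PID base register.  */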
static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}

/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}

static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S	(1 << 0)
#define CC_FLAG_Z	(1 << 1)
#define CC_FLAG_O	(1 << 2)
#define CC_FLAG_C	(1 << 3)
#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);

/* Classify OP according to whether it refers to an object in a PID data
   area, and if so, whether the reference has already been encoded.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data
			   area, but it has not been placed there yet.  */
};

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}

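/* Implement TARGET_LEGITIMIZE_ADDRESS.  Encode an unencoded PID
   reference and force doubly indexed addresses into a register.  */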
static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}

/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}

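/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Returns true if X is a
   valid address for a memory operand of mode MODE.  */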
static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}

/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      return RX_REG_P (SUBREG_REG (mem));

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}

/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

          addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}

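/* Output the integer VAL to FILE, using decimal for small values
   and the assembler's hexadecimal notation for larger ones.  */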
static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}

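/* Implement TARGET_ASM_INTEGER.  Output the integer constant X of SIZE
   bytes, using rx_print_integer for plain integers so that the chosen
   assembler syntax is respected.  */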
static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char *  op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}


/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
         register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1:	case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}

/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx          dest = operands[0];
  rtx          src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case E_QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case E_HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case E_DFmode:
    case E_DImode:
    case E_SFmode:
    case E_SImode:
      extension = ".L";
      break;
    case E_VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}

/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}

/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}

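/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Account for the (rounded up)
   size of the argument just processed.  */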
static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}

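/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */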
static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}

/* Return an RTL expression describing where a function return value of type
   RET_TYPE is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool       outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode)
      )
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}

/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}

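/* Implement TARGET_RETURN_IN_MEMORY.  */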
static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}

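/* Implement TARGET_STRUCT_VALUE_RTX.  Return the register holding the
   address of a returned aggregate.  */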
static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}

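/* Implement TARGET_RETURN_IN_MSB.  */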
static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}

/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}

/* Returns true if the provided function has the "fast_interrupt" attribute.  */

bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}

/* Returns true if the provided function has the "interrupt" attribute.  */

bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}

/* Returns true if the provided function has the "naked" attribute.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}

static bool use_fixed_regs = false;

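/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  Reserve base registers
   for PID and small data addressing when they are in use, and adjust
   the fixed/call-used register sets when switching to or from a fast
   interrupt handler.  */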
static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

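/* Push FNDECL onto the stack of warned-about decls.  */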
static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}

/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}

/* The typical stack layout looks like this after the function's prologue:

                            |    |
                              --                       ^
                            |    | \                   |
                            |    |   arguments saved   | Increasing
                            |    |   on the stack      |  addresses
    PARENT   arg pointer -> |    | /
  -------------------------- ---- -------------------
    CHILD                   |ret |   return address
                              --
                            |    | \
                            |    |   call saved
                            |    |   registers
                            |    | /
                              --
                            |    | \
                            |    |   local
                            |    |   variables
        frame pointer ->    |    | /
                              --
                            |    | \
                            |    |   outgoing          | Decreasing
                            |    |   arguments         |  addresses
   current stack pointer -> |    | /                   |
  -------------------------- ---- ------------------   V
                            |    |                 */

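/* Return the number of bits set in X.  */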
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >>  8;

  return (x + (x >> 16)) & 0x3f;
}

#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic ?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}

/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}

/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}

/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}

/* Create CFI notes for register pops.  */
static void
add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
{
  rtx t = plus_constant (Pmode, stack_pointer_rtx,
                        (high - low + 1) * UNITS_PER_WORD);
  t = gen_rtx_SET (stack_pointer_rtx, t);
  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
  RTX_FRAME_RELATED_P (insn) = 1;
  for (unsigned int i = low; i <= high; i++)
    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
}


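/* Return true if VAL is small enough to be used directly as a constant
   operand, given the limit imposed by -mmax-constant-size.  */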
static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
		        ( 1 << (rx_max_constant_size * 8)));
}

/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}

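/* Push registers LOW through HIGH (inclusive) onto the stack, using a
   single PUSH for one register and a PUSHM otherwise, and mark the
   resulting insn as frame related.  */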
static void
push_regs (unsigned int high, unsigned int low)
{
  rtx insn;

  if (low == high)
    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
  else
    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
				       gen_rx_store_vector (low, high)));
  mark_frame_related (insn);
}

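/* Expand the function prologue: push the call-saved registers, preserve
   the accumulator if required, then set up the frame pointer and
   allocate space for the frame and the outgoing arguments.  */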
1725 void
rx_expand_prologue(void)1726 rx_expand_prologue (void)
1727 {
1728   unsigned int stack_size;
1729   unsigned int frame_size;
1730   unsigned int mask;
1731   unsigned int low;
1732   unsigned int high;
1733   unsigned int reg;
1734 
1735   /* Naked functions use their own, programmer provided prologues.  */
1736   if (is_naked_func (NULL_TREE))
1737     return;
1738 
1739   rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1740 
1741   if (flag_stack_usage_info)
1742     current_function_static_stack_size = frame_size + stack_size;
1743 
1744   /* If we use any of the callee-saved registers, save them now.  */
1745   if (mask)
1746     {
1747       /* Push registers in reverse order.  */
1748       for (reg = CC_REGNUM; reg --;)
1749 	if (mask & (1 << reg))
1750 	  {
1751 	    low = high = reg;
1752 
1753 	    /* Look for a span of registers.
1754 	       Note - we do not have to worry about -Os and whether
1755 	       it is better to use a single, longer PUSHM as
1756 	       rx_get_stack_layout has already done that for us.  */
1757 	    while (reg-- > 0)
1758 	      if ((mask & (1 << reg)) == 0)
1759 		break;
1760 	      else
1761 		--low;
1762 
1763 	    push_regs (high, low);
1764 	    if (reg == (unsigned) -1)
1765 	      break;
1766 	  }
1767     }
1768   else if (low)
1769     push_regs (high, low);
1770 
1771   if (MUST_SAVE_ACC_REGISTER)
1772     {
1773       unsigned int acc_high, acc_low;
1774 
1775       /* Interrupt handlers have to preserve the accumulator
1776 	 register if so requested by the user.  Use the first
1777          two pushed registers as intermediaries.  */
1778       if (mask)
1779 	{
1780 	  acc_low = acc_high = 0;
1781 
1782 	  for (reg = 1; reg < CC_REGNUM; reg ++)
1783 	    if (mask & (1 << reg))
1784 	      {
1785 		if (acc_low == 0)
1786 		  acc_low = reg;
1787 		else
1788 		  {
1789 		    acc_high = reg;
1790 		    break;
1791 		  }
1792 	      }
1793 
1794 	  /* We have assumed that there are at least two registers pushed... */
1795 	  gcc_assert (acc_high != 0);
1796 
1797 	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
1798 	     We just assume that they are zero.  */
1799 	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1800 	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1801 	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1802 	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1803 	}
1804       else
1805 	{
1806 	  acc_low = low;
1807 	  acc_high = low + 1;
1808 
1809 	  /* We have assumed that there are at least two registers pushed... */
1810 	  gcc_assert (acc_high <= high);
1811 
1812 	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1813 	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1814 	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1815 				      gen_rx_store_vector (acc_low, acc_high)));
1816 	}
1817     }
1818 
1819   /* If needed, set up the frame pointer.  */
1820   if (frame_pointer_needed)
1821     gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1822 		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1823 
1824   /* Allocate space for the outgoing args.
1825      If the stack frame has not already been set up then handle this as well.  */
1826   if (stack_size)
1827     {
1828       if (frame_size)
1829 	{
1830 	  if (frame_pointer_needed)
1831 	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1832 			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1833 	  else
1834 	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1835 			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1836 			  true);
1837 	}
1838       else
1839 	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1840 		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1841     }
1842   else if (frame_size)
1843     {
1844       if (! frame_pointer_needed)
1845 	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1846 		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1847       else
1848 	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1849 		      false /* False because the epilogue will use the FP not the SP.  */);
1850     }
1851 }
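/* Illustration only (not fixed output of this code): a function that
   needs r6..r8 saved and a 12 byte local frame might get a prologue
   along the lines of:

	pushm	r6-r8		; push_regs -> gen_stack_pushm
	add	#-12, r0	; frame allocation via gen_safe_add

   Interrupt handlers that must preserve the accumulator additionally
   copy its middle and high 32 bits into two of the pushed registers
   (via MVFACMI/MVFACHI) and push those as well.  */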
1852 
1853 static void
1854 add_vector_labels (FILE *file, const char *aname)
1855 {
1856   tree vec_attr;
1857   tree val_attr;
1858   const char *vname = "vect";
1859   const char *s;
1860   int vnum;
1861 
1862   /* This node is for the vector/interrupt tag itself.  */
1863   vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1864   if (!vec_attr)
1865     return;
1866 
1867   /* Now point it at the first argument.  */
1868   vec_attr = TREE_VALUE (vec_attr);
1869 
1870   /* Iterate through the arguments.  */
1871   while (vec_attr)
1872     {
1873       val_attr = TREE_VALUE (vec_attr);
1874       switch (TREE_CODE (val_attr))
1875 	{
1876 	case STRING_CST:
1877 	  s = TREE_STRING_POINTER (val_attr);
1878 	  goto string_id_common;
1879 
1880 	case IDENTIFIER_NODE:
1881 	  s = IDENTIFIER_POINTER (val_attr);
1882 
1883 	string_id_common:
1884 	  if (strcmp (s, "$default") == 0)
1885 	    {
1886 	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1887 	      fprintf (file, "$tableentry$default$%s:\n", vname);
1888 	    }
1889 	  else
1890 	    vname = s;
1891 	  break;
1892 
1893 	case INTEGER_CST:
1894 	  vnum = TREE_INT_CST_LOW (val_attr);
1895 
1896 	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1897 	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1898 	  break;
1899 
1900 	default:
1901 	  ;
1902 	}
1903 
1904       vec_attr = TREE_CHAIN (vec_attr);
1905     }
1906 
1907 }
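/* Hypothetical example: a handler declared as

     void isr (void) __attribute__ ((interrupt ("$default", 5)));

   would make the loop above emit:

	.global	$tableentry$default$vect
   $tableentry$default$vect:
	.global	$tableentry$5$vect
   $tableentry$5$vect:

   so that a linker script can collect the labels into a vector table.  */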
1908 
1909 static void
1910 rx_output_function_prologue (FILE * file)
1911 {
1912   add_vector_labels (file, "interrupt");
1913   add_vector_labels (file, "vector");
1914 
1915   if (is_fast_interrupt_func (NULL_TREE))
1916     asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1917 
1918   if (is_interrupt_func (NULL_TREE))
1919     asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1920 
1921   if (is_naked_func (NULL_TREE))
1922     asm_fprintf (file, "\t; Note: Naked Function\n");
1923 
1924   if (cfun->static_chain_decl != NULL)
1925     asm_fprintf (file, "\t; Note: Nested function declared "
1926 		 "inside another function.\n");
1927 
1928   if (crtl->calls_eh_return)
1929     asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1930 }
1931 
1932 /* Generate a POPM or RTSD instruction that matches the given operands.  */
1933 
1934 void
1935 rx_emit_stack_popm (rtx * operands, bool is_popm)
1936 {
1937   HOST_WIDE_INT stack_adjust;
1938   HOST_WIDE_INT last_reg;
1939   rtx first_push;
1940 
1941   gcc_assert (CONST_INT_P (operands[0]));
1942   stack_adjust = INTVAL (operands[0]);
1943 
1944   gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1945   last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1946 
1947   first_push = XVECEXP (operands[1], 0, 1);
1948   gcc_assert (SET_P (first_push));
1949   first_push = SET_DEST (first_push);
1950   gcc_assert (REG_P (first_push));
1951 
1952   if (is_popm)
1953     asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1954 		 reg_names [REGNO (first_push)],
1955 		 reg_names [REGNO (first_push) + last_reg]);
1956   else
1957     asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1958 		 (int) stack_adjust,
1959 		 reg_names [REGNO (first_push)],
1960 		 reg_names [REGNO (first_push) + last_reg]);
1961 }
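/* For instance (values illustrative), a PARALLEL restoring r6..r10
   is printed as "popm r6-r10", or, in the RTSD form with a 16 byte
   frame, as "rtsd #16, r6-r10".  */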
1962 
1963 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */
1964 
1965 static rtx
1966 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1967 {
1968   unsigned int i;
1969   unsigned int bias = 3;
1970   unsigned int count = (high - low) + bias;
1971   rtx vector;
1972 
1973   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1974 
1975   XVECEXP (vector, 0, 0) =
1976     gen_rtx_SET (stack_pointer_rtx,
1977 		 plus_constant (Pmode, stack_pointer_rtx, adjust));
1978 
1979   for (i = 0; i < count - 2; i++)
1980     XVECEXP (vector, 0, i + 1) =
1981       gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1982 		   gen_rtx_MEM (SImode,
1983 				i == 0 ? stack_pointer_rtx
1984 				: plus_constant (Pmode, stack_pointer_rtx,
1985 						 i * UNITS_PER_WORD)));
1986 
1987   XVECEXP (vector, 0, count - 1) = ret_rtx;
1988 
1989   return vector;
1990 }
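/* Sketch of the PARALLEL built above, for LOW=6, HIGH=7, ADJUST=16:

     (parallel [(set (reg sp) (plus (reg sp) (const_int 16)))
		(set (reg r6) (mem (reg sp)))
		(set (reg r7) (mem (plus (reg sp) (const_int 4))))
		(return)])  */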
1991 
1992 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */
1993 
1994 static rtx
1995 gen_rx_popm_vector (unsigned int low, unsigned int high)
1996 {
1997   unsigned int i;
1998   unsigned int count = (high - low) + 2;
1999   rtx vector;
2000 
2001   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2002 
2003   XVECEXP (vector, 0, 0) =
2004     gen_rtx_SET (stack_pointer_rtx,
2005 		 plus_constant (Pmode, stack_pointer_rtx,
2006 				(count - 1) * UNITS_PER_WORD));
2007 
2008   for (i = 0; i < count - 1; i++)
2009     XVECEXP (vector, 0, i + 1) =
2010       gen_rtx_SET (gen_rtx_REG (SImode, low + i),
2011 		   gen_rtx_MEM (SImode,
2012 				i == 0 ? stack_pointer_rtx
2013 				: plus_constant (Pmode, stack_pointer_rtx,
2014 						 i * UNITS_PER_WORD)));
2015 
2016   return vector;
2017 }
2018 
2019 /* Returns true if a simple return insn can be used.  */
2020 
2021 bool
2022 rx_can_use_simple_return (void)
2023 {
2024   unsigned int low;
2025   unsigned int high;
2026   unsigned int frame_size;
2027   unsigned int stack_size;
2028   unsigned int register_mask;
2029 
2030   if (is_naked_func (NULL_TREE)
2031       || is_fast_interrupt_func (NULL_TREE)
2032       || is_interrupt_func (NULL_TREE))
2033     return false;
2034 
2035   rx_get_stack_layout (& low, & high, & register_mask,
2036 		       & frame_size, & stack_size);
2037 
2038   return (register_mask == 0
2039 	  && (frame_size + stack_size) == 0
2040 	  && low == 0);
2041 }
2042 
2043 static void
2044 pop_regs (unsigned int high, unsigned int low)
2045 {
2046   rtx_insn *insn;
2047   if (high == low)
2048     insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2049   else
2050     insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
2051 						* UNITS_PER_WORD),
2052 				      gen_rx_popm_vector (low, high)));
2053   add_pop_cfi_notes (insn, high, low);
2054 }
2055 
2056 void
2057 rx_expand_epilogue (bool is_sibcall)
2058 {
2059   unsigned int low;
2060   unsigned int high;
2061   unsigned int frame_size;
2062   unsigned int stack_size;
2063   unsigned int register_mask;
2064   unsigned int regs_size;
2065   unsigned int reg;
2066   unsigned HOST_WIDE_INT total_size;
2067 
2068   /* FIXME: We do not support indirect sibcalls at the moment because we
2069      cannot guarantee that the register holding the function address is a
2070      call-used register.  If it is a call-saved register then the stack
2071      pop instructions generated in the epilogue will corrupt the address
2072      before it is used.
2073 
2074      Creating a new call-used-only register class works but then the
2075      reload pass gets stuck because it cannot always find a call-used
2076      register for spilling sibcalls.
2077 
2078      The other possible solution is for this pass to scan forward for the
2079      sibcall instruction (if it has been generated) and work out if it
2080      is an indirect sibcall using a call-saved register.  If it is then
2081      the address can be copied into a call-used register in this epilogue
2082      code and the sibcall instruction modified to use that register.  */
2083 
2084   if (is_naked_func (NULL_TREE))
2085     {
2086       gcc_assert (! is_sibcall);
2087 
2088       /* Naked functions use their own, programmer provided epilogues.
2089 	 But, in order to keep gcc happy we have to generate some kind of
2090 	 epilogue RTL.  */
2091       emit_jump_insn (gen_naked_return ());
2092       return;
2093     }
2094 
2095   rx_get_stack_layout (& low, & high, & register_mask,
2096 		       & frame_size, & stack_size);
2097 
2098   total_size = frame_size + stack_size;
2099   regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2100 
2101   /* See if we are unable to use the special stack frame deconstruct and
2102      return instructions.  In most cases we can use them, but the exceptions
2103      are:
2104 
2105      - Sibling calling functions deconstruct the frame but do not return to
2106        their caller.  Instead they branch to their sibling and allow their
2107        return instruction to return to this function's parent.
2108 
2109      - Fast and normal interrupt handling functions have to use special
2110        return instructions.
2111 
2112      - Functions where we have pushed a fragmented set of registers into the
2113        call-save area must have the same set of registers popped.  */
2114   if (is_sibcall
2115       || is_fast_interrupt_func (NULL_TREE)
2116       || is_interrupt_func (NULL_TREE)
2117       || register_mask)
2118     {
2119       /* Cannot use the special instructions - deconstruct by hand.  */
2120       if (total_size)
2121 	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2122 		      GEN_INT (total_size), false);
2123 
2124       if (MUST_SAVE_ACC_REGISTER)
2125 	{
2126 	  unsigned int acc_low, acc_high;
2127 
2128 	  /* Reverse the saving of the accumulator register onto the stack.
2129 	     Note we must adjust the saved "low" accumulator value as it
2130	     is really the middle 32 bits of the accumulator.  */
2131 	  if (register_mask)
2132 	    {
2133 	      acc_low = acc_high = 0;
2134 
2135 	      for (reg = 1; reg < CC_REGNUM; reg ++)
2136 		if (register_mask & (1 << reg))
2137 		  {
2138 		    if (acc_low == 0)
2139 		      acc_low = reg;
2140 		    else
2141 		      {
2142 			acc_high = reg;
2143 			break;
2144 		      }
2145 		  }
2146 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2147 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2148 	    }
2149 	  else
2150 	    {
2151 	      acc_low = low;
2152 	      acc_high = low + 1;
2153 	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2154 					 gen_rx_popm_vector (acc_low, acc_high)));
2155 	    }
2156 
2157 	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2158 				  gen_rtx_REG (SImode, acc_low),
2159 				  GEN_INT (16)));
2160 	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2161 	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2162 	}
2163 
2164       if (register_mask)
2165 	{
2166 	  for (reg = 0; reg < CC_REGNUM; reg ++)
2167 	    if (register_mask & (1 << reg))
2168 	      {
2169 		low = high = reg;
2170 		while (register_mask & (1 << high))
2171 		  high ++;
2172 		pop_regs (high - 1, low);
2173 		reg = high;
2174 	      }
2175 	}
2176       else if (low)
2177 	pop_regs (high, low);
2178 
2179       if (is_fast_interrupt_func (NULL_TREE))
2180 	{
2181 	  gcc_assert (! is_sibcall);
2182 	  emit_jump_insn (gen_fast_interrupt_return ());
2183 	}
2184       else if (is_interrupt_func (NULL_TREE))
2185 	{
2186 	  gcc_assert (! is_sibcall);
2187 	  emit_jump_insn (gen_exception_return ());
2188 	}
2189       else if (! is_sibcall)
2190 	emit_jump_insn (gen_simple_return ());
2191 
2192       return;
2193     }
2194 
2195   /* If we allocated space on the stack, free it now.  */
2196   if (total_size)
2197     {
2198       unsigned HOST_WIDE_INT rtsd_size;
2199 
2200       /* See if we can use the RTSD instruction.  */
2201       rtsd_size = total_size + regs_size;
2202       if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2203 	{
2204 	  if (low)
2205 	    emit_jump_insn (gen_pop_and_return
2206 			    (GEN_INT (rtsd_size),
2207 			     gen_rx_rtsd_vector (rtsd_size, low, high)));
2208 	  else
2209 	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2210 
2211 	  return;
2212 	}
2213 
2214       gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2215 		    GEN_INT (total_size), false);
2216     }
2217 
2218   if (low)
2219     emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2220 					gen_rx_rtsd_vector (regs_size,
2221 							    low, high)));
2222   else
2223     emit_jump_insn (gen_simple_return ());
2224 }
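/* Illustrative epilogues (register spans and sizes hypothetical): a
   simple function can use the combined deconstruct-and-return form

	rtsd	#20, r6-r8

   whereas an interrupt handler must deconstruct by hand:

	add	#20, r0
	popm	r6-r8
	rte  */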
2225 
2226 
2227 /* Compute the offset (in bytes) between FROM (arg pointer
2228    or frame pointer) and TO (frame pointer or stack pointer).
2229    See ASCII art comment at the start of rx_expand_prologue
2230    for more information.  */
2231 
2232 int
2233 rx_initial_elimination_offset (int from, int to)
2234 {
2235   unsigned int low;
2236   unsigned int high;
2237   unsigned int frame_size;
2238   unsigned int stack_size;
2239   unsigned int mask;
2240 
2241   rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2242 
2243   if (from == ARG_POINTER_REGNUM)
2244     {
2245       /* Extend the computed size of the stack frame to
2246 	 include the registers pushed in the prologue.  */
2247       if (low)
2248 	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2249       else
2250 	frame_size += bit_count (mask) * UNITS_PER_WORD;
2251 
2252       /* Remember to include the return address.  */
2253       frame_size += 1 * UNITS_PER_WORD;
2254 
2255       if (to == FRAME_POINTER_REGNUM)
2256 	return frame_size;
2257 
2258       gcc_assert (to == STACK_POINTER_REGNUM);
2259       return frame_size + stack_size;
2260     }
2261 
2262   gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2263   return stack_size;
2264 }
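/* Worked example with a hypothetical layout: r6..r8 saved (12 bytes),
   a 16 byte frame and 8 bytes of outgoing args give an AP->FP offset
   of 12 + 4 (return address) + 16 = 32, an AP->SP offset of
   32 + 8 = 40, and an FP->SP offset of 8.  */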
2265 
2266 /* Decide if a variable should go into one of the small data sections.  */
2267 
2268 static bool
2269 rx_in_small_data (const_tree decl)
2270 {
2271   int size;
2272   const char * section;
2273 
2274   if (rx_small_data_limit == 0)
2275     return false;
2276 
2277   if (TREE_CODE (decl) != VAR_DECL)
2278     return false;
2279 
2280   /* We do not put read-only variables into a small data area because
2281      they would be placed with the other read-only sections, far away
2282      from the read-write data sections, and we only have one small
2283      data area pointer.
2284      Similarly commons are placed in the .bss section which might be
2285      far away from (and out of alignment with respect to) the .data section.  */
2286   if (TREE_READONLY (decl) || DECL_COMMON (decl))
2287     return false;
2288 
2289   section = DECL_SECTION_NAME (decl);
2290   if (section)
2291     return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2292 
2293   size = int_size_in_bytes (TREE_TYPE (decl));
2294 
2295   return (size > 0) && (size <= rx_small_data_limit);
2296 }
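/* For example, with -msmall-data-limit=8 an 8 byte writable static
   lands in the small data area and is reached through the small data
   base register, while read-only, common or larger objects take the
   normal path.  (Example size purely illustrative.)  */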
2297 
2298 /* Return a section for X.
2299    The only special thing we do here is to honor small data.  */
2300 
2301 static section *
2302 rx_select_rtx_section (machine_mode mode,
2303 		       rtx x,
2304 		       unsigned HOST_WIDE_INT align)
2305 {
2306   if (rx_small_data_limit > 0
2307       && GET_MODE_SIZE (mode) <= rx_small_data_limit
2308       && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2309     return sdata_section;
2310 
2311   return default_elf_select_rtx_section (mode, x, align);
2312 }
2313 
2314 static section *
2315 rx_select_section (tree decl,
2316 		   int reloc,
2317 		   unsigned HOST_WIDE_INT align)
2318 {
2319   if (rx_small_data_limit > 0)
2320     {
2321       switch (categorize_decl_for_section (decl, reloc))
2322 	{
2323 	case SECCAT_SDATA:	return sdata_section;
2324 	case SECCAT_SBSS:	return sbss_section;
2325 	case SECCAT_SRODATA:
2326 	  /* Fall through.  We do not put small, read only
2327 	     data into the C_2 section because we are not
2328 	     using the C_2 section.  We do not use the C_2
2329 	     section because it is located with the other
2330 	     read-only data sections, far away from the read-write
2331 	     data sections and we only have one small data
2332 	     pointer (r13).  */
2333 	default:
2334 	  break;
2335 	}
2336     }
2337 
2338   /* If we are supporting the Renesas assembler
2339      we cannot use mergeable sections.  */
2340   if (TARGET_AS100_SYNTAX)
2341     switch (categorize_decl_for_section (decl, reloc))
2342       {
2343       case SECCAT_RODATA_MERGE_CONST:
2344       case SECCAT_RODATA_MERGE_STR_INIT:
2345       case SECCAT_RODATA_MERGE_STR:
2346 	return readonly_data_section;
2347 
2348       default:
2349 	break;
2350       }
2351 
2352   return default_elf_select_section (decl, reloc, align);
2353 }
2354 
2355 enum rx_builtin
2356 {
2357   RX_BUILTIN_BRK,
2358   RX_BUILTIN_CLRPSW,
2359   RX_BUILTIN_INT,
2360   RX_BUILTIN_MACHI,
2361   RX_BUILTIN_MACLO,
2362   RX_BUILTIN_MULHI,
2363   RX_BUILTIN_MULLO,
2364   RX_BUILTIN_MVFACHI,
2365   RX_BUILTIN_MVFACMI,
2366   RX_BUILTIN_MVFC,
2367   RX_BUILTIN_MVTACHI,
2368   RX_BUILTIN_MVTACLO,
2369   RX_BUILTIN_MVTC,
2370   RX_BUILTIN_MVTIPL,
2371   RX_BUILTIN_RACW,
2372   RX_BUILTIN_REVW,
2373   RX_BUILTIN_RMPA,
2374   RX_BUILTIN_ROUND,
2375   RX_BUILTIN_SETPSW,
2376   RX_BUILTIN_WAIT,
2377   RX_BUILTIN_max
2378 };
2379 
2380 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2381 
2382 static void
2383 rx_init_builtins (void)
2384 {
2385 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
2386    rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2387    add_builtin_function ("__builtin_rx_" LC_NAME,			\
2388 			build_function_type_list (RET_TYPE##_type_node, \
2389 						  NULL_TREE),		\
2390 			RX_BUILTIN_##UC_NAME,				\
2391 			BUILT_IN_MD, NULL, NULL_TREE)
2392 
2393 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
2394    rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2395    add_builtin_function ("__builtin_rx_" LC_NAME,			\
2396 			build_function_type_list (RET_TYPE##_type_node, \
2397 						  ARG_TYPE##_type_node, \
2398 						  NULL_TREE),		\
2399 			RX_BUILTIN_##UC_NAME,				\
2400 			BUILT_IN_MD, NULL, NULL_TREE)
2401 
2402 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2403   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2404   add_builtin_function ("__builtin_rx_" LC_NAME,			\
2405 			build_function_type_list (RET_TYPE##_type_node, \
2406 						  ARG_TYPE1##_type_node,\
2407 						  ARG_TYPE2##_type_node,\
2408 						  NULL_TREE),		\
2409 			RX_BUILTIN_##UC_NAME,				\
2410 			BUILT_IN_MD, NULL, NULL_TREE)
2411 
2412 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2413   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
2414   add_builtin_function ("__builtin_rx_" LC_NAME,			\
2415 			build_function_type_list (RET_TYPE##_type_node, \
2416 						  ARG_TYPE1##_type_node,\
2417 						  ARG_TYPE2##_type_node,\
2418 						  ARG_TYPE3##_type_node,\
2419 						  NULL_TREE),		\
2420 			RX_BUILTIN_##UC_NAME,				\
2421 			BUILT_IN_MD, NULL, NULL_TREE)
2422 
2423   ADD_RX_BUILTIN0 (BRK,     "brk",     void);
2424   ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
2425   ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
2426   ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
2427   ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
2428   ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
2429   ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
2430   ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
2431   ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2432   ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2433   ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
2434   ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
2435   ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
2436   ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
2437   ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
2438   ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
2439   ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
2440   ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
2441   ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
2442   ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
2443 }
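/* Typical (purely illustrative) uses of the intrinsics registered
   above:

     int hi = __builtin_rx_mvfachi ();   -- read accumulator bits 63:32
     __builtin_rx_mvtaclo (x);           -- write accumulator bits 31:0
     int r  = __builtin_rx_round (1.5f); -- FPU float-to-int rounding  */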
2444 
2445 /* Return the RX builtin for CODE.  */
2446 
2447 static tree
2448 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2449 {
2450   if (code >= RX_BUILTIN_max)
2451     return error_mark_node;
2452 
2453   return rx_builtins[code];
2454 }
2455 
2456 static rtx
2457 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2458 {
2459   if (reg && ! REG_P (arg))
2460     arg = force_reg (SImode, arg);
2461 
2462   emit_insn (gen_func (arg));
2463 
2464   return NULL_RTX;
2465 }
2466 
2467 static rtx
2468 rx_expand_builtin_mvtc (tree exp)
2469 {
2470   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2471   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2472 
2473   if (! CONST_INT_P (arg1))
2474     return NULL_RTX;
2475 
2476   if (! REG_P (arg2))
2477     arg2 = force_reg (SImode, arg2);
2478 
2479   emit_insn (gen_mvtc (arg1, arg2));
2480 
2481   return NULL_RTX;
2482 }
2483 
2484 static rtx
2485 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2486 {
2487   rtx arg = expand_normal (t_arg);
2488 
2489   if (! CONST_INT_P (arg))
2490     return NULL_RTX;
2491 
2492   if (target == NULL_RTX)
2493     return NULL_RTX;
2494 
2495   if (! REG_P (target))
2496     target = force_reg (SImode, target);
2497 
2498   emit_insn (gen_mvfc (target, arg));
2499 
2500   return target;
2501 }
2502 
2503 static rtx
2504 rx_expand_builtin_mvtipl (rtx arg)
2505 {
2506   /* The RX610 does not support the MVTIPL instruction.  */
2507   if (rx_cpu_type == RX610)
2508     return NULL_RTX;
2509 
2510   if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2511     return NULL_RTX;
2512 
2513   emit_insn (gen_mvtipl (arg));
2514 
2515   return NULL_RTX;
2516 }
2517 
2518 static rtx
2519 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2520 {
2521   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2522   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2523 
2524   if (! REG_P (arg1))
2525     arg1 = force_reg (SImode, arg1);
2526 
2527   if (! REG_P (arg2))
2528     arg2 = force_reg (SImode, arg2);
2529 
2530   emit_insn (gen_func (arg1, arg2));
2531 
2532   return NULL_RTX;
2533 }
2534 
2535 static rtx
2536 rx_expand_int_builtin_1_arg (rtx arg,
2537 			     rtx target,
2538 			     rtx (* gen_func)(rtx, rtx),
2539 			     bool mem_ok)
2540 {
2541   if (! REG_P (arg))
2542     if (!mem_ok || ! MEM_P (arg))
2543       arg = force_reg (SImode, arg);
2544 
2545   if (target == NULL_RTX || ! REG_P (target))
2546     target = gen_reg_rtx (SImode);
2547 
2548   emit_insn (gen_func (target, arg));
2549 
2550   return target;
2551 }
2552 
2553 static rtx
2554 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2555 {
2556   if (target == NULL_RTX || ! REG_P (target))
2557     target = gen_reg_rtx (SImode);
2558 
2559   emit_insn (gen_func (target));
2560 
2561   return target;
2562 }
2563 
2564 static rtx
2565 rx_expand_builtin_round (rtx arg, rtx target)
2566 {
2567   if ((! REG_P (arg) && ! MEM_P (arg))
2568       || GET_MODE (arg) != SFmode)
2569     arg = force_reg (SFmode, arg);
2570 
2571   if (target == NULL_RTX || ! REG_P (target))
2572     target = gen_reg_rtx (SImode);
2573 
2574   emit_insn (gen_lrintsf2 (target, arg));
2575 
2576   return target;
2577 }
2578 
2579 static int
2580 valid_psw_flag (rtx op, const char *which)
2581 {
2582   static int mvtc_inform_done = 0;
2583 
2584   if (GET_CODE (op) == CONST_INT)
2585     switch (INTVAL (op))
2586       {
2587       case 0: case 'c': case 'C':
2588       case 1: case 'z': case 'Z':
2589       case 2: case 's': case 'S':
2590       case 3: case 'o': case 'O':
2591       case 8: case 'i': case 'I':
2592       case 9: case 'u': case 'U':
2593 	return 1;
2594       }
2595 
2596   error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2597   if (!mvtc_inform_done)
2598     error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2599   mvtc_inform_done = 1;
2600 
2601   return 0;
2602 }
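/* The flags are accepted either by letter or by PSW bit number, so
   for example __builtin_rx_clrpsw ('I') and __builtin_rx_clrpsw (8)
   both clear the interrupt enable bit.  */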
2603 
2604 static rtx
2605 rx_expand_builtin (tree exp,
2606 		   rtx target,
2607 		   rtx subtarget ATTRIBUTE_UNUSED,
2608 		   machine_mode mode ATTRIBUTE_UNUSED,
2609 		   int ignore ATTRIBUTE_UNUSED)
2610 {
2611   tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2612   tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2613   rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
2614   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2615 
2616   switch (fcode)
2617     {
2618     case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
2619     case RX_BUILTIN_CLRPSW:
2620       if (!valid_psw_flag (op, "clrpsw"))
2621 	return NULL_RTX;
2622       return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2623     case RX_BUILTIN_SETPSW:
2624       if (!valid_psw_flag (op, "setpsw"))
2625 	return NULL_RTX;
2626       return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2627     case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
2628 	(op, gen_int, false);
2629     case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
2630     case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
2631     case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
2632     case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
2633     case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2634 	(target, gen_mvfachi);
2635     case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2636 	(target, gen_mvfacmi);
2637     case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2638 	(op, gen_mvtachi, true);
2639     case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2640 	(op, gen_mvtaclo, true);
2641     case RX_BUILTIN_RMPA:
2642       if (rx_allow_string_insns)
2643 	emit_insn (gen_rmpa ());
2644       else
2645 	error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2646       return NULL_RTX;
2647     case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
2648     case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
2649     case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
2650     case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
2651 	(op, gen_racw, false);
2652     case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
2653     case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
2654 	(op, target, gen_revw, false);
2655     case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;
2656 
2657     default:
2658       internal_error ("bad builtin code");
2659       break;
2660     }
2661 
2662   return NULL_RTX;
2663 }
2664 
2665 /* Place an element into a constructor or destructor section.
2666    Like default_ctor_section_asm_out_constructor in varasm.c
2667    except that it uses .init_array (or .fini_array) and it
2668    handles constructor priorities.  */
2669 
2670 static void
2671 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2672 {
2673   section * s;
2674 
2675   if (priority != DEFAULT_INIT_PRIORITY)
2676     {
2677       char buf[18];
2678 
2679       sprintf (buf, "%s.%.5u",
2680 	       is_ctor ? ".init_array" : ".fini_array",
2681 	       priority);
2682       s = get_section (buf, SECTION_WRITE, NULL_TREE);
2683     }
2684   else if (is_ctor)
2685     s = ctors_section;
2686   else
2687     s = dtors_section;
2688 
2689   switch_to_section (s);
2690   assemble_align (POINTER_SIZE);
2691   assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2692 }
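/* E.g. a constructor with priority 101 lands in a section named
   ".init_array.00101", which linker scripts typically sort ahead of
   the unnumbered ".init_array" entries.  */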
2693 
2694 static void
2695 rx_elf_asm_constructor (rtx symbol, int priority)
2696 {
2697   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2698 }
2699 
2700 static void
2701 rx_elf_asm_destructor (rtx symbol, int priority)
2702 {
2703   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2704 }
2705 
2706 /* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2707 
2708 static tree
2709 rx_handle_func_attribute (tree * node,
2710 			  tree   name,
2711 			  tree   args ATTRIBUTE_UNUSED,
2712 			  int    flags ATTRIBUTE_UNUSED,
2713 			  bool * no_add_attrs)
2714 {
2715   gcc_assert (DECL_P (* node));
2716 
2717   if (TREE_CODE (* node) != FUNCTION_DECL)
2718     {
2719       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2720 	       name);
2721       * no_add_attrs = true;
2722     }
2723 
2724   /* FIXME: We ought to check for conflicting attributes.  */
2725 
2726   /* FIXME: We ought to check that the interrupt and exception
2727      handler attributes have been applied to void functions.  */
2728   return NULL_TREE;
2729 }
2730 
2731 /* Check "vector" attribute.  */
2732 
2733 static tree
2734 rx_handle_vector_attribute (tree * node,
2735 			    tree   name,
2736 			    tree   args,
2737 			    int    flags ATTRIBUTE_UNUSED,
2738 			    bool * no_add_attrs)
2739 {
2740   gcc_assert (DECL_P (* node));
2741   gcc_assert (args != NULL_TREE);
2742 
2743   if (TREE_CODE (* node) != FUNCTION_DECL)
2744     {
2745       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2746 	       name);
2747       * no_add_attrs = true;
2748     }
2749 
2750   return NULL_TREE;
2751 }
2752 
2753 /* Table of RX specific attributes.  */
2754 const struct attribute_spec rx_attribute_table[] =
2755 {
2756   /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
2757      affects_type_identity, handler, exclude.  */
2758   { "fast_interrupt", 0, 0, true, false, false, false,
2759     rx_handle_func_attribute, NULL },
2760   { "interrupt",      0, -1, true, false, false, false,
2761     rx_handle_func_attribute, NULL },
2762   { "naked",          0, 0, true, false, false, false,
2763     rx_handle_func_attribute, NULL },
2764   { "vector",         1, -1, true, false, false, false,
2765     rx_handle_vector_attribute, NULL },
2766   { NULL,             0, 0, false, false, false, false, NULL, NULL }
2767 };
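/* For reference, these attributes appear on declarations such as
   (vector number purely illustrative):

     void rtc_isr (void) __attribute__ ((interrupt, vector (42)));
     void boot (void)    __attribute__ ((naked));  */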
2768 
2769 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
2770 
2771 static void
2772 rx_override_options_after_change (void)
2773 {
2774   static bool first_time = TRUE;
2775 
2776   if (first_time)
2777     {
2778       /* If this is the first time through and the user has not disabled
2779 	 the use of RX FPU hardware then enable -ffinite-math-only,
2780 	 since the FPU instructions do not support NaNs and infinities.  */
2781       if (TARGET_USE_FPU)
2782 	flag_finite_math_only = 1;
2783 
2784       first_time = FALSE;
2785     }
2786   else
2787     {
2788       /* Alert the user if they are changing the optimization options
2789 	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2790       if (TARGET_USE_FPU
2791 	  && !flag_finite_math_only)
2792 	warning (0, "RX FPU instructions do not support NaNs and infinities");
2793     }
2794 }
2795 
2796 static void
2797 rx_option_override (void)
2798 {
2799   unsigned int i;
2800   cl_deferred_option *opt;
2801   vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2802 
2803   if (v)
2804     FOR_EACH_VEC_ELT (*v, i, opt)
2805       {
2806 	switch (opt->opt_index)
2807 	  {
2808 	  case OPT_mint_register_:
2809 	    switch (opt->value)
2810 	      {
2811 	      case 4:
2812 		fixed_regs[10] = call_used_regs [10] = 1;
2813 		/* Fall through.  */
2814 	      case 3:
2815 		fixed_regs[11] = call_used_regs [11] = 1;
2816 		/* Fall through.  */
2817 	      case 2:
2818 		fixed_regs[12] = call_used_regs [12] = 1;
2819 		/* Fall through.  */
2820 	      case 1:
2821 		fixed_regs[13] = call_used_regs [13] = 1;
2822 		/* Fall through.  */
2823 	      case 0:
2824 		rx_num_interrupt_regs = opt->value;
2825 		break;
2826 	      default:
2827 		rx_num_interrupt_regs = 0;
2828 		/* Error message already given because rx_handle_option
2829 		  returned false.  */
2830 		break;
2831 	      }
2832 	    break;
2833 
2834 	  default:
2835 	    gcc_unreachable ();
2836 	  }
2837       }
2838 
2839   /* This target defaults to strict volatile bitfields.  */
2840   if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2841     flag_strict_volatile_bitfields = 1;
2842 
2843   rx_override_options_after_change ();
2844 
2845   /* These values are bytes, not log.  */
2846   if (align_jumps == 0 && ! optimize_size)
2847     align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2848   if (align_loops == 0 && ! optimize_size)
2849     align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2850   if (align_labels == 0 && ! optimize_size)
2851     align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2852 }
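/* Note the fall-throughs above: e.g. -mint-register=2 reserves both
   r12 and r13 for fast interrupt handlers, removing them from normal
   register allocation.  */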
2853 
2854 
2855 static bool
2856 rx_allocate_stack_slots_for_args (void)
2857 {
2858   /* Naked functions should not allocate stack slots for arguments.  */
2859   return ! is_naked_func (NULL_TREE);
2860 }
2861 
2862 static bool
2863 rx_func_attr_inlinable (const_tree decl)
2864 {
2865   return ! is_fast_interrupt_func (decl)
2866     &&   ! is_interrupt_func (decl)
2867     &&   ! is_naked_func (decl);
2868 }
2869 
2870 static bool
2871 rx_warn_func_return (tree decl)
2872 {
2873   /* Naked functions are implemented entirely in assembly, including the
2874      return sequence, so suppress warnings about this.  */
2875   return !is_naked_func (decl);
2876 }
2877 
2878 /* Return nonzero if it is ok to make a tail-call to DECL,
2879    a function_decl or NULL if this is an indirect call, using EXP.  */
2880 
2881 static bool
2882 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2883 {
2884   if (TARGET_JSR)
2885     return false;
2886 
2887   /* Do not allow indirect tailcalls.  The
2888      sibcall patterns do not support them.  */
2889   if (decl == NULL)
2890     return false;
2891 
2892   /* Never tailcall from inside interrupt handlers or naked functions.  */
2893   if (is_fast_interrupt_func (NULL_TREE)
2894       || is_interrupt_func (NULL_TREE)
2895       || is_naked_func (NULL_TREE))
2896     return false;
2897 
2898   return true;
2899 }
2900 
2901 static void
2902 rx_file_start (void)
2903 {
2904   if (! TARGET_AS100_SYNTAX)
2905     default_file_start ();
2906 }
2907 
2908 static bool
2909 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2910 {
2911   /* The packed attribute overrides the MS behavior.  */
2912   return ! TYPE_PACKED (record_type);
2913 }
2914 
2915 /* Returns true if X a legitimate constant for an immediate
2916    operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2917 
2918 bool
2919 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2920 {
2921   switch (GET_CODE (x))
2922     {
2923     case CONST:
2924       x = XEXP (x, 0);
2925 
2926       if (GET_CODE (x) == PLUS)
2927 	{
2928 	  if (! CONST_INT_P (XEXP (x, 1)))
2929 	    return false;
2930 
2931 	  /* GCC would not pass us CONST_INT + CONST_INT so we
2932 	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
2933 	  x = XEXP (x, 0);
2934 	  gcc_assert (! CONST_INT_P (x));
2935 	}
2936 
2937       switch (GET_CODE (x))
2938 	{
2939 	case LABEL_REF:
2940 	case SYMBOL_REF:
2941 	  return true;
2942 
2943 	case UNSPEC:
2944 	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2945 
2946 	default:
2947 	  /* FIXME: Can this ever happen ?  */
2948 	  gcc_unreachable ();
2949 	}
2950       break;
2951 
2952     case LABEL_REF:
2953     case SYMBOL_REF:
2954       return true;
2955     case CONST_DOUBLE:
2956       return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2957     case CONST_VECTOR:
2958       return false;
2959     default:
2960       gcc_assert (CONST_INT_P (x));
2961       break;
2962     }
2963 
2964   return ok_for_max_constant (INTVAL (x));
2965 }
2966 
2967 static int
2968 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2969 		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2970 {
2971   rtx a, b;
2972 
2973   if (GET_CODE (addr) != PLUS)
2974     return COSTS_N_INSNS (1);
2975 
2976   a = XEXP (addr, 0);
2977   b = XEXP (addr, 1);
2978 
2979   if (REG_P (a) && REG_P (b))
2980     /* Try to discourage REG+REG addressing as it keeps two registers live.  */
2981     return COSTS_N_INSNS (4);
2982 
2983   if (speed)
2984     /* [REG+OFF] is just as fast as [REG].  */
2985     return COSTS_N_INSNS (1);
2986 
2987   if (CONST_INT_P (b)
2988       && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2989     /* Try to discourage REG + <large OFF> when optimizing for size.  */
2990     return COSTS_N_INSNS (2);
2991 
2992   return COSTS_N_INSNS (1);
2993 }
2994 
2995 static bool
2996 rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
2997 	      int opno ATTRIBUTE_UNUSED, int* total, bool speed)
2998 {
2999   if (x == const0_rtx)
3000     {
3001       *total = 0;
3002       return true;
3003     }
3004 
3005   switch (GET_CODE (x))
3006     {
3007     case MULT:
3008       if (mode == DImode)
3009 	{
3010 	  *total = COSTS_N_INSNS (2);
3011 	  return true;
3012 	}
3013       /* fall through */
3014 
3015     case PLUS:
3016     case MINUS:
3017     case AND:
3018     case COMPARE:
3019     case IOR:
3020     case XOR:
3021       *total = COSTS_N_INSNS (1);
3022       return true;
3023 
3024     case DIV:
3025       if (speed)
3026 	/* This is the worst case for a division.  Pessimize divisions when
3027 	   not optimizing for size and allow reciprocal optimizations which
3028 	   produce bigger code.  */
3029 	*total = COSTS_N_INSNS (20);
3030       else
3031 	*total = COSTS_N_INSNS (3);
3032       return true;
3033 
3034     case UDIV:
3035       if (speed)
3036 	/* This is the worst case for a division.  Pessimize divisions when
3037 	   not optimizing for size and allow reciprocal optimizations which
3038 	   produce bigger code.  */
3039 	*total = COSTS_N_INSNS (18);
3040       else
3041 	*total = COSTS_N_INSNS (3);
3042       return true;
3043 
3044     default:
3045       break;
3046     }
3047 
3048   return false;
3049 }
3050 
3051 static bool
3052 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3053 {
3054   /* We can always eliminate to the frame pointer.
3055      We can eliminate to the stack pointer unless a frame
3056      pointer is needed.  */
3057 
3058   return to == FRAME_POINTER_REGNUM
3059     || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
3060 }
3061 
3062 
3063 static void
3064 rx_trampoline_template (FILE * file)
3065 {
3066   /* Output assembler code for a block containing the constant
3067      part of a trampoline, leaving space for the variable parts.
3068 
3069      On the RX, (where r8 is the static chain regnum) the trampoline
3070      looks like:
3071 
3072 	   mov 		#<static chain value>, r8
3073 	   mov          #<function's address>, r9
3074 	   jmp		r9
3075 
3076      In big-endian-data-mode however instructions are read into the CPU
3077      4 bytes at a time.  These bytes are then swapped around before being
3078      passed to the decoder.  So...we must partition our trampoline into
3079      4 byte packets and swap these packets around so that the instruction
3080      reader will reverse the process.  But, in order to avoid splitting
3081      the 32-bit constants across these packet boundaries, (making inserting
3082      them into the constructed trampoline very difficult) we have to pad the
3083      instruction sequence with NOP insns, i.e.:
3084 
3085            nop
3086 	   nop
3087            mov.l	#<...>, r8
3088 	   nop
3089 	   nop
3090            mov.l	#<...>, r9
3091            jmp		r9
3092 	   nop
3093 	   nop             */
3094 
3095   if (! TARGET_BIG_ENDIAN_DATA)
3096     {
3097       asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3098       asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3099       asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
3100     }
3101   else
3102     {
3103       char r8 = '0' + STATIC_CHAIN_REGNUM;
3104       char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3105 
3106       if (TARGET_AS100_SYNTAX)
3107         {
3108           asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
3109           asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
3110           asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
3111           asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
3112           asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
3113         }
3114       else
3115         {
3116           asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
3117           asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
3118           asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
3119           asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
3120           asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
3121         }
3122     }
3123 }
3124 
3125 static void
3126 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3127 {
3128   rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3129 
3130   emit_block_move (tramp, assemble_trampoline_template (),
3131 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3132 
3133   if (TARGET_BIG_ENDIAN_DATA)
3134     {
3135       emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3136       emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3137     }
3138   else
3139     {
3140       emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3141       emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3142     }
3143 }
3144 
3145 static int
3146 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3147 		     reg_class_t regclass ATTRIBUTE_UNUSED,
3148 		     bool in)
3149 {
3150   return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3151 }
3152 
3153 /* Convert a CC_MODE to the set of flags that it represents.  */
3154 
3155 static unsigned int
3156 flags_from_mode (machine_mode mode)
3157 {
3158   switch (mode)
3159     {
3160     case E_CC_ZSmode:
3161       return CC_FLAG_S | CC_FLAG_Z;
3162     case E_CC_ZSOmode:
3163       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3164     case E_CC_ZSCmode:
3165       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3166     case E_CCmode:
3167       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3168     case E_CC_Fmode:
3169       return CC_FLAG_FP;
3170     default:
3171       gcc_unreachable ();
3172     }
3173 }
3174 
3175 /* Convert a set of flags to a CC_MODE that can implement it.  */
3176 
3177 static machine_mode
3178 mode_from_flags (unsigned int f)
3179 {
3180   if (f & CC_FLAG_FP)
3181     return CC_Fmode;
3182   if (f & CC_FLAG_O)
3183     {
3184       if (f & CC_FLAG_C)
3185 	return CCmode;
3186       else
3187 	return CC_ZSOmode;
3188     }
3189   else if (f & CC_FLAG_C)
3190     return CC_ZSCmode;
3191   else
3192     return CC_ZSmode;
3193 }
3194 
3195 /* Convert an RTX_CODE to the set of flags needed to implement it.
3196    This assumes an integer comparison.  */
3197 
3198 static unsigned int
3199 flags_from_code (enum rtx_code code)
3200 {
3201   switch (code)
3202     {
3203     case LT:
3204     case GE:
3205       return CC_FLAG_S;
3206     case GT:
3207     case LE:
3208       return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3209     case GEU:
3210     case LTU:
3211       return CC_FLAG_C;
3212     case GTU:
3213     case LEU:
3214       return CC_FLAG_C | CC_FLAG_Z;
3215     case EQ:
3216     case NE:
3217       return CC_FLAG_Z;
3218     default:
3219       gcc_unreachable ();
3220     }
3221 }
3222 
3223 /* Return a CC_MODE of which both M1 and M2 are subsets.  */
3224 
3225 static machine_mode
3226 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3227 {
3228   unsigned f;
3229 
3230   /* Early out for identical modes.  */
3231   if (m1 == m2)
3232     return m1;
3233 
3234   /* There's no valid combination for FP vs non-FP.  */
3235   f = flags_from_mode (m1) | flags_from_mode (m2);
3236   if (f & CC_FLAG_FP)
3237     return VOIDmode;
3238 
3239   /* Otherwise, see what mode can implement all the flags.  */
3240   return mode_from_flags (f);
3241 }
3242 
3243 /* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
3244 
3245 machine_mode
3246 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3247 {
3248   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3249     return CC_Fmode;
3250 
3251   if (y != const0_rtx)
3252     return CCmode;
3253 
3254   return mode_from_flags (flags_from_code (cmp_code));
3255 }
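/* For instance, (LT x 0) needs only the S flag, so the smallest mode
   providing it, CC_ZSmode, is chosen; a comparison against a non-zero
   Y always falls back to full CCmode.  */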
3256 
3257 /* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
3258    CC_MODE, and use that in branches based on that compare.  */
3259 
3260 void
3261 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3262 		  rtx c1, rtx c2, rtx label)
3263 {
3264   rtx flags, x;
3265 
3266   flags = gen_rtx_REG (cc_mode, CC_REG);
3267   x = gen_rtx_COMPARE (cc_mode, c1, c2);
3268   x = gen_rtx_SET (flags, x);
3269   emit_insn (x);
3270 
3271   x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3272   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3273   x = gen_rtx_SET (pc_rtx, x);
3274   emit_jump_insn (x);
3275 }
3276 
3277 /* A helper function for matching parallels that set the flags.  */
3278 
3279 bool
3280 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3281 {
3282   rtx op1, flags;
3283   machine_mode flags_mode;
3284 
3285   gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3286 
3287   op1 = XVECEXP (PATTERN (insn), 0, 0);
3288   gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3289 
3290   flags = SET_DEST (op1);
3291   flags_mode = GET_MODE (flags);
3292 
3293   if (GET_MODE (SET_SRC (op1)) != flags_mode)
3294     return false;
3295   if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3296     return false;
3297 
3298   /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
3299   if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3300     return false;
3301 
3302   return true;
3303 }
3304 
3305 int
3306 rx_align_for_label (rtx lab, int uses_threshold)
3307 {
3308   /* This is a simple heuristic to guess when an alignment would not be useful
3309      because the delay due to the inserted NOPs would be greater than the delay
3310      due to the misaligned branch.  If uses_threshold is zero then the alignment
3311      is always useful.  */
3312   if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3313     return 0;
3314 
3315   if (optimize_size)
3316     return 0;
3317   /* These values are log, not bytes.  */
3318   if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3319     return 2; /* 4 bytes */
3320   return 3;   /* 8 bytes */
3321 }
3322 
3323 static int
3324 rx_max_skip_for_label (rtx_insn *lab)
3325 {
3326   int opsize;
3327   rtx_insn *op;
3328 
3329   if (optimize_size)
3330     return 0;
3331 
3332   if (lab == NULL)
3333     return 0;
3334 
3335   op = lab;
3336   do
3337     {
3338       op = next_nonnote_nondebug_insn (op);
3339     }
3340   while (op && (LABEL_P (op)
3341 		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3342   if (!op)
3343     return 0;
3344 
3345   opsize = get_attr_length (op);
3346   if (opsize >= 0 && opsize < 8)
3347     return opsize - 1;
3348   return 0;
3349 }
3350 
3351 /* Compute the real length of the extending load-and-op instructions.  */
3352 
3353 int
3354 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3355 {
3356   rtx extend, mem, offset;
3357   bool zero;
3358   int factor;
3359 
3360   if (!INSN_P (insn))
3361     return current_length;
3362 
3363   switch (INSN_CODE (insn))
3364     {
3365     default:
3366       return current_length;
3367 
3368     case CODE_FOR_plussi3_zero_extendhi:
3369     case CODE_FOR_andsi3_zero_extendhi:
3370     case CODE_FOR_iorsi3_zero_extendhi:
3371     case CODE_FOR_xorsi3_zero_extendhi:
3372     case CODE_FOR_divsi3_zero_extendhi:
3373     case CODE_FOR_udivsi3_zero_extendhi:
3374     case CODE_FOR_minussi3_zero_extendhi:
3375     case CODE_FOR_smaxsi3_zero_extendhi:
3376     case CODE_FOR_sminsi3_zero_extendhi:
3377     case CODE_FOR_multsi3_zero_extendhi:
3378     case CODE_FOR_comparesi3_zero_extendhi:
3379       zero = true;
3380       factor = 2;
3381       break;
3382 
3383     case CODE_FOR_plussi3_sign_extendhi:
3384     case CODE_FOR_andsi3_sign_extendhi:
3385     case CODE_FOR_iorsi3_sign_extendhi:
3386     case CODE_FOR_xorsi3_sign_extendhi:
3387     case CODE_FOR_divsi3_sign_extendhi:
3388     case CODE_FOR_udivsi3_sign_extendhi:
3389     case CODE_FOR_minussi3_sign_extendhi:
3390     case CODE_FOR_smaxsi3_sign_extendhi:
3391     case CODE_FOR_sminsi3_sign_extendhi:
3392     case CODE_FOR_multsi3_sign_extendhi:
3393     case CODE_FOR_comparesi3_sign_extendhi:
3394       zero = false;
3395       factor = 2;
3396       break;
3397 
3398     case CODE_FOR_plussi3_zero_extendqi:
3399     case CODE_FOR_andsi3_zero_extendqi:
3400     case CODE_FOR_iorsi3_zero_extendqi:
3401     case CODE_FOR_xorsi3_zero_extendqi:
3402     case CODE_FOR_divsi3_zero_extendqi:
3403     case CODE_FOR_udivsi3_zero_extendqi:
3404     case CODE_FOR_minussi3_zero_extendqi:
3405     case CODE_FOR_smaxsi3_zero_extendqi:
3406     case CODE_FOR_sminsi3_zero_extendqi:
3407     case CODE_FOR_multsi3_zero_extendqi:
3408     case CODE_FOR_comparesi3_zero_extendqi:
3409       zero = true;
3410       factor = 1;
3411       break;
3412 
3413     case CODE_FOR_plussi3_sign_extendqi:
3414     case CODE_FOR_andsi3_sign_extendqi:
3415     case CODE_FOR_iorsi3_sign_extendqi:
3416     case CODE_FOR_xorsi3_sign_extendqi:
3417     case CODE_FOR_divsi3_sign_extendqi:
3418     case CODE_FOR_udivsi3_sign_extendqi:
3419     case CODE_FOR_minussi3_sign_extendqi:
3420     case CODE_FOR_smaxsi3_sign_extendqi:
3421     case CODE_FOR_sminsi3_sign_extendqi:
3422     case CODE_FOR_multsi3_sign_extendqi:
3423     case CODE_FOR_comparesi3_sign_extendqi:
3424       zero = false;
3425       factor = 1;
3426       break;
3427     }
3428 
3429   /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
3430   extend = single_set (insn);
3431   gcc_assert (extend != NULL_RTX);
3432 
3433   extend = SET_SRC (extend);
3434   if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3435       || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3436     extend = XEXP (extend, 0);
3437   else
3438     extend = XEXP (extend, 1);
3439 
3440   gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3441 	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3442 
3443   mem = XEXP (extend, 0);
3444   gcc_checking_assert (MEM_P (mem));
3445   if (REG_P (XEXP (mem, 0)))
3446     return (zero && factor == 1) ? 2 : 3;
3447 
3448   /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
3449   gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3450   gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3451 
3452   offset = XEXP (XEXP (mem, 0), 1);
3453   gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3454 
3455   if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3456     return (zero && factor == 1) ? 3 : 4;
3457 
3458   return (zero && factor == 1) ? 4 : 5;
3459 }
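/* Example: for (plus (reg) (zero_extend (mem (reg)))) -- a byte load
   with plain register addressing -- the code above yields length 2;
   a displacement of, say, 200 grows that to 3 and anything larger
   to 4.  */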
3460 
3461 static bool
3462 rx_narrow_volatile_bitfield (void)
3463 {
3464   return true;
3465 }
3466 
3467 static bool
3468 rx_ok_to_inline (tree caller, tree callee)
3469 {
3470   /* Do not inline functions with local variables
3471      into a naked CALLER - naked functions have no stack frame and
3472      locals need a frame in order to have somewhere to live.
3473 
3474      Unfortunately we have no way to determine the presence of
3475      local variables in CALLEE, so we have to be cautious and
3476      assume that there might be some there.
3477 
3478      We do allow inlining when CALLEE has the "inline" type
3479      modifier or the "always_inline" or "gnu_inline" attributes.  */
3480   return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3481     || DECL_DECLARED_INLINE_P (callee)
3482     || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3483     || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3484 }
3485 
3486 static bool
3487 rx_enable_lra (void)
3488 {
3489   return TARGET_ENABLE_LRA;
3490 }
3491 
3492 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3493 {
3494   if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3495     {
3496       /* If we are inside an interrupt handler, assume that interrupts are
3497 	 off -- which is the default hardware behavior.  In this case, there
3498 	 is no need to disable the interrupts.  */
3499       m_prev_psw_reg = NULL;
3500     }
3501   else
3502     {
3503       m_prev_psw_reg = gen_reg_rtx (SImode);
3504       emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3505       emit_insn (gen_clrpsw (GEN_INT ('I')));
3506     }
3507 }
3508 
~rx_atomic_sequence(void)3509 rx_atomic_sequence::~rx_atomic_sequence (void)
3510 {
3511   if (m_prev_psw_reg != NULL)
3512     emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3513 }
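
/* Usage sketch (hypothetical caller): rx_atomic_sequence is an RAII
   guard, so insns emitted inside its scope execute with the PSW I
   flag cleared and the saved PSW is restored on scope exit:

     {
       rx_atomic_sequence seq (current_function_decl);
       emit_insn (...);		-- runs with interrupts disabled
     }				-- PSW restored by the destructor  */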

/* Given an insn and a reg number, tell whether the reg dies or is unused
   after the insn.  */
bool
rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
{
  return find_regno_note (i, REG_DEAD, regno) != NULL
	 || find_regno_note (i, REG_UNUSED, regno) != NULL;
}

/* Copy dead and unused notes from SRC to DST for the specified REGNO.  */
void
rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src, rtx_insn* dst)
{
  int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);

  if (rtx note = find_regno_note (src, REG_DEAD, regno))
    add_shallow_copy_of_reg_note (dst, note);

  if (rtx note = find_regno_note (src, REG_UNUSED, regno))
    add_shallow_copy_of_reg_note (dst, note);
}

/* Try to fuse the current bit-operation insn with the surrounding memory load
   and store.  */
bool
rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
			 rtx (*gen_insn)(rtx, rtx))
{
  rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];

  set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
					   prev_nonnote_nondebug_insn_bb);
  if (op2_def.set_src == NULL_RTX
      || !MEM_P (op2_def.set_src)
      || GET_MODE (op2_def.set_src) != QImode
      || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
					   GET_MODE (op2_def.set_src))
      || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
      || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg)))
    return false;

  /* The register operand originates from a memory load that could be
     fused with the bitop insn.
     Now look for a following memory store to the same memory operand.  */
  rtx mem = op2_def.set_src;

  /* If the load uses an auto-modifying address, it can't be fused.  */
  if (GET_CODE (XEXP (mem, 0)) == POST_INC
      || GET_CODE (XEXP (mem, 0)) == PRE_INC
      || GET_CODE (XEXP (mem, 0)) == POST_DEC
      || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
    return false;

  rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
					  next_nonnote_nondebug_insn_bb);
  if (op0_use == NULL
      || !(GET_CODE (PATTERN (op0_use)) == SET
	   && RX_REG_P (XEXP (PATTERN (op0_use), 1))
	   && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
	   && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
      || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
      || reg_set_between_p (operands[2], curr_insn, op0_use))
    return false;

  /* Fusing the load-modify-store sequence could change the ordering of
     memory accesses if there are other accesses between the load and
     the store.  If there are volatile mems between the load and the
     store, it is better not to change the ordering.  Likewise, a call
     between the load and the store makes fusing unsafe.  */
  for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
       i != NULL && i != op0_use;
       i = next_nonnote_nondebug_insn_bb (i))
    if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
      return false;

  emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
  set_insn_deleted (op2_def.insn);
  set_insn_deleted (op0_use);
  return true;
}
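
/* An RTL sketch of the fusion performed above (register numbers and
   the IOR operation are made up; the exact pattern comes from the
   caller's GEN_INSN):

     (set (reg:QI 100) (mem:QI (reg:SI 1)))			-- load
     (set (reg:QI 101) (ior:QI (reg:QI 100) (const_int 4)))	-- bitop
     (set (mem:QI (reg:SI 1)) (reg:QI 101))			-- store

   collapses into a single read-modify-write insn such as

     (set (mem:QI (reg:SI 1))
	  (ior:QI (mem:QI (reg:SI 1)) (const_int 4)))  */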

/* Implement TARGET_HARD_REGNO_NREGS.  */

static unsigned int
rx_hard_regno_nregs (unsigned int, machine_mode mode)
{
  return CLASS_MAX_NREGS (0, mode);
}

/* Implement TARGET_HARD_REGNO_MODE_OK.  */

static bool
rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
{
  return REGNO_REG_CLASS (regno) == GR_REGS;
}

/* Implement TARGET_MODES_TIEABLE_P.  */

static bool
rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
  return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
	   || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
	  == (GET_MODE_CLASS (mode2) == MODE_FLOAT
	      || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
}
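
/* Worked examples of the predicate above: SImode and HImode are
   tieable (neither is floating-point), SFmode and SCmode are tieable
   (both are), but SImode and SFmode are not, since exactly one of
   them is floating-point.  */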

#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef	TARGET_ASM_SELECT_SECTION
#define	TARGET_ASM_SELECT_SECTION	rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG			rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE		rx_function_arg_advance

#undef	TARGET_FUNCTION_ARG_BOUNDARY
#define	TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER			rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN			rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P				rx_enable_lra

#undef  TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS			rx_hard_regno_nregs
#undef  TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK		rx_hard_regno_mode_ok

#undef  TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P			rx_modes_tieable_p

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rx_rtx_costs

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-rx.h"