1 /* Subroutines used for code generation on Renesas RX processors.
2    Copyright (C) 2008-2013 Free Software Foundation, Inc.
3    Contributed by Red Hat.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 /* To Do:
22 
23  * Re-enable memory-to-memory copies and fix up reload.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "toplev.h"
45 #include "reload.h"
46 #include "df.h"
47 #include "ggc.h"
48 #include "tm_p.h"
49 #include "debug.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "langhooks.h"
53 #include "opts.h"
54 #include "cgraph.h"
55 
/* Hard register selected as the base for %gp small-data addressing,
   or INVALID_REGNUM until one has been chosen.  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
/* Hard register selected as the base for PID addressing,
   or INVALID_REGNUM until one has been chosen.  */
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* Number of registers set aside for interrupt handlers.
   NOTE(review): presumably set from -mint-register= during option
   processing — the assignment is not visible in this chunk; confirm.  */
static unsigned int rx_num_interrupt_regs;
59 
60 static unsigned int
rx_gp_base_regnum(void)61 rx_gp_base_regnum (void)
62 {
63   if (rx_gp_base_regnum_val == INVALID_REGNUM)
64     gcc_unreachable ();
65   return rx_gp_base_regnum_val;
66 }
67 
68 static unsigned int
rx_pid_base_regnum(void)69 rx_pid_base_regnum (void)
70 {
71   if (rx_pid_base_regnum_val == INVALID_REGNUM)
72     gcc_unreachable ();
73   return rx_pid_base_regnum_val;
74 }
75 
76 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */
77 
78 static tree
rx_decl_for_addr(rtx op)79 rx_decl_for_addr (rtx op)
80 {
81   if (GET_CODE (op) == MEM)
82     op = XEXP (op, 0);
83   if (GET_CODE (op) == CONST)
84     op = XEXP (op, 0);
85   while (GET_CODE (op) == PLUS)
86     op = XEXP (op, 0);
87   if (GET_CODE (op) == SYMBOL_REF)
88     return SYMBOL_REF_DECL (op);
89   return NULL_TREE;
90 }
91 
/* Forward declaration — rx_print_operand is referenced before its
   definition later in this file.  */
static void rx_print_operand (FILE *, rtx, int);

/* Bit masks describing which condition-code flags a comparison mode
   or rtx comparison code makes use of.  */
#define CC_FLAG_S	(1 << 0)
#define CC_FLAG_Z	(1 << 1)
#define CC_FLAG_O	(1 << 2)
#define CC_FLAG_C	(1 << 3)
#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */

/* Defined later in this file; map a mode/code to the CC_FLAG_* bits above.  */
static unsigned int flags_from_mode (enum machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);
102 
/* Classification of an operand with respect to the PID
   (position-independent data) area.  Returned by
   rx_pid_data_operand, below.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data
			   area, but it has not been placed there yet.  */
};
111 
112 static enum pid_type
rx_pid_data_operand(rtx op)113 rx_pid_data_operand (rtx op)
114 {
115   tree op_decl;
116 
117   if (!TARGET_PID)
118     return PID_NOT_PID;
119 
120   if (GET_CODE (op) == PLUS
121       && GET_CODE (XEXP (op, 0)) == REG
122       && GET_CODE (XEXP (op, 1)) == CONST
123       && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
124     return PID_ENCODED;
125 
126   op_decl = rx_decl_for_addr (op);
127 
128   if (op_decl)
129     {
130       if (TREE_READONLY (op_decl))
131 	return PID_UNENCODED;
132     }
133   else
134     {
135       /* Sigh, some special cases.  */
136       if (GET_CODE (op) == SYMBOL_REF
137 	  || GET_CODE (op) == LABEL_REF)
138 	return PID_UNENCODED;
139     }
140 
141   return PID_NOT_PID;
142 }
143 
144 static rtx
rx_legitimize_address(rtx x,rtx oldx ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED)145 rx_legitimize_address (rtx x,
146 		       rtx oldx ATTRIBUTE_UNUSED,
147 		       enum machine_mode mode ATTRIBUTE_UNUSED)
148 {
149   if (rx_pid_data_operand (x) == PID_UNENCODED)
150     {
151       rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
152       return rv;
153     }
154 
155   if (GET_CODE (x) == PLUS
156       && GET_CODE (XEXP (x, 0)) == PLUS
157       && REG_P (XEXP (XEXP (x, 0), 0))
158       && REG_P (XEXP (x, 1)))
159     return force_reg (SImode, x);
160 
161   return x;
162 }
163 
164 /* Return true if OP is a reference to an object in a small data area.  */
165 
166 static bool
rx_small_data_operand(rtx op)167 rx_small_data_operand (rtx op)
168 {
169   if (rx_small_data_limit == 0)
170     return false;
171 
172   if (GET_CODE (op) == SYMBOL_REF)
173     return SYMBOL_REF_SMALL_P (op);
174 
175   return false;
176 }
177 
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Decide whether X is a
   valid address for a MODE-sized memory access.  */

static bool
rx_is_legitimate_address (enum machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  /* PID references must already be in encoded form to be legitimate;
     unencoded ones still require rewriting by rx_legitimize_address.  */
  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      /* One operand must be a valid base register; the other becomes
	 the index/displacement.  */
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    /* NOTE(review): the default label deliberately shares the
	       factor-4 case, so modes larger than 4 bytes are treated
	       as word-scaled — confirm this is intended for DI/DF.  */
	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    /* The hardware displacement field is 16 bits, scaled.  */
	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
271 
/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  MEM is the address
   expression itself, not a containing MEM rtx.  */

bool
rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
{
  /* Reject anything that is not a legitimate address to begin with.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* The displacement must fit in a 16-bit field scaled by the
	   access size.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}
316 
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  Return true if the
   meaning or validity of ADDR changes with the mode of the access.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Look through a CONST wrapper.  */
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      /* Classify by the second operand of the sum/difference.  */
      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 8-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 252))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  gcc_assert (REG_P (XEXP (addr, 0)));
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
381 
/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  Uses the RX "[reg]",
   "[-reg]" (pre-decrement), "[reg+]" (post-increment) and
   "disp[reg]" / "[index,base]" notations.  */

static void
rx_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	/* Whichever operand is a valid base register is the base;
	   the other is the index or displacement.  */
	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    /* Neither operand is a base register — fall back to a
	       symbolic "a + b" rendering.  */
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    /* Indexed addressing: "[index,base]".  */
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    /* Register relative: "disp[base]".  */
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  /* FIXME: Putting this case label here is an appalling abuse
	     of the C language — control jumps into the middle of this
	     if-body when ADDR is a bare UNSPEC.  Both paths end up
	     extracting the wrapped CONST_INT.  */
	case UNSPEC:
          addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	}
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
464 
465 static void
rx_print_integer(FILE * file,HOST_WIDE_INT val)466 rx_print_integer (FILE * file, HOST_WIDE_INT val)
467 {
468   if (IN_RANGE (val, -64, 64))
469     fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
470   else
471     fprintf (file,
472 	     TARGET_AS100_SYNTAX
473 	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
474 	     val);
475 }
476 
477 static bool
rx_assemble_integer(rtx x,unsigned int size,int is_aligned)478 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
479 {
480   const char *  op = integer_asm_op (size, is_aligned);
481 
482   if (! CONST_INT_P (x))
483     return default_assemble_integer (x, size, is_aligned);
484 
485   if (op == NULL)
486     return false;
487   fputs (op, asm_out_file);
488 
489   rx_print_integer (asm_out_file, INTVAL (x));
490   fputc ('\n', asm_out_file);
491   return true;
492 }
493 
494 
495 /* Handles the insertion of a single operand into the assembler output.
496    The %<letter> directives supported are:
497 
498      %A  Print an operand without a leading # character.
499      %B  Print an integer comparison name.
500      %C  Print a control register name.
501      %F  Print a condition code flag name.
502      %G  Register used for small-data-area addressing
503      %H  Print high part of a DImode register, integer or address.
504      %L  Print low part of a DImode register, integer or address.
505      %N  Print the negation of the immediate value.
506      %P  Register used for PID addressing
507      %Q  If the operand is a MEM, then correctly generate
508          register indirect or register relative addressing.
509      %R  Like %Q but for zero-extending loads.  */
510 
/* Implement the operand printing hook — see the %<letter> directive
   table in the comment above for the supported letters.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* %A applied to a PID-style UNSPEC (or CONST-wrapped UNSPEC) falls
     through to the default handling with the leading '#' suppressed.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print an integer comparison name (branch condition suffix).  */
      {
	enum rtx_code code = GET_CODE (op);
	enum machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    /* The comparison mode must provide every flag that this
	       comparison code relies upon.  */
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Print a control register name.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0:   fprintf (file, "psw"); break;
	case 2:   fprintf (file, "usp"); break;
	case 3:   fprintf (file, "fpsw"); break;
	case 4:   fprintf (file, "cpen"); break;
	case 8:   fprintf (file, "bpsw"); break;
	case 9:   fprintf (file, "bpc"); break;
	case 0xa: fprintf (file, "isp"); break;
	case 0xb: fprintf (file, "fintv"); break;
	case 0xc: fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Print a condition code flag name.  Accepts either the flag's
	 bit number or its upper/lower case letter.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1:	case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      /* Register used for small-data-area addressing.  */
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* Print high part of a DImode register, integer or address.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Print low part of a DImode register, integer or address.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      /* Print the negation of the immediate value.  */
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      /* Register used for PID addressing.  */
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      /* Like %Q below, but for zero-extending loads.  */
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      /* For a MEM, emit "<disp>[<reg>].<size-suffix>".  */
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      /* The address may be (reg + int) or (int + reg).  */
	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      /* NOTE(review): "%ld" can truncate a 64-bit HOST_WIDE_INT
		 on LLP64 hosts; HOST_WIDE_INT_PRINT_DEC would be safer —
		 confirm before changing.  */
	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  /* Emit the size suffix and sanity-check that the displacement
	     fits the scaled 16-bit field for this access size.  */
	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* PID-related constants receive special treatment before the
	 generic code-based dispatch below.  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	    REAL_VALUE_TO_TARGET_SINGLE (rv, val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		/* Emit "#(sym[+addend]-__pid_base)".  */
		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
927 
928 /* Maybe convert an operand into its PID format.  */
929 
930 rtx
rx_maybe_pidify_operand(rtx op,int copy_to_reg)931 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
932 {
933   if (rx_pid_data_operand (op) == PID_UNENCODED)
934     {
935       if (GET_CODE (op) == MEM)
936 	{
937 	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
938 	  op = replace_equiv_address (op, a);
939 	}
940       else
941 	{
942 	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
943 	}
944 
945       if (copy_to_reg)
946 	op = copy_to_mode_reg (GET_MODE (op), op);
947     }
948   return op;
949 }
950 
/* Returns an assembler template for a move instruction.
   OPERANDS[0] is the destination and OPERANDS[1] the source; IS_MOVU
   selects the zero-extending "movu" mnemonic instead of "mov".  The
   result is kept in a static buffer, so it is only valid until the
   next call.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx          dest = operands[0];
  rtx          src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  /* Unencoded PID loads are expressed relative to the PID base
     register; small-data accesses are expressed relative to %gp.  */
  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    src_template = "(%A1-__pid_base)[%P1]";
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    src_template = "%%gp(%A1)[%G1]";
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    dst_template = "%%gp(%A0)[%G0]";
  else
    dst_template = "%0";

  sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	   extension, src_template, dst_template);
  return out_template;
}
1006 
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1015 
1016 /* Return the number of bytes in the argument registers
1017    occupied by an argument of type TYPE and mode MODE.  */
1018 
1019 static unsigned int
rx_function_arg_size(enum machine_mode mode,const_tree type)1020 rx_function_arg_size (enum machine_mode mode, const_tree type)
1021 {
1022   unsigned int num_bytes;
1023 
1024   num_bytes = (mode == BLKmode)
1025     ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1026   return rx_round_up (num_bytes, UNITS_PER_WORD);
1027 }
1028 
/* Up to four words of arguments are passed in registers
   (r1..r4 — see rx_function_arg below).  */
#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1031 
1032 /* Return an RTL expression describing the register holding a function
1033    parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1034    be passed on the stack.  CUM describes the previous parameters to the
1035    function and NAMED is false if the parameter is part of a variable
1036    parameter list, or the last named parameter before the start of a
1037    variable parameter list.  */
1038 
1039 static rtx
rx_function_arg(cumulative_args_t cum,enum machine_mode mode,const_tree type,bool named)1040 rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
1041 		 const_tree type, bool named)
1042 {
1043   unsigned int next_reg;
1044   unsigned int bytes_so_far = *get_cumulative_args (cum);
1045   unsigned int size;
1046   unsigned int rounded_size;
1047 
1048   /* An exploded version of rx_function_arg_size.  */
1049   size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1050   /* If the size is not known it cannot be passed in registers.  */
1051   if (size < 1)
1052     return NULL_RTX;
1053 
1054   rounded_size = rx_round_up (size, UNITS_PER_WORD);
1055 
1056   /* Don't pass this arg via registers if there
1057      are insufficient registers to hold all of it.  */
1058   if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1059     return NULL_RTX;
1060 
1061   /* Unnamed arguments and the last named argument in a
1062      variadic function are always passed on the stack.  */
1063   if (!named)
1064     return NULL_RTX;
1065 
1066   /* Structures must occupy an exact number of registers,
1067      otherwise they are passed on the stack.  */
1068   if ((type == NULL || AGGREGATE_TYPE_P (type))
1069       && (size % UNITS_PER_WORD) != 0)
1070     return NULL_RTX;
1071 
1072   next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1073 
1074   return gen_rtx_REG (mode, next_reg);
1075 }
1076 
1077 static void
rx_function_arg_advance(cumulative_args_t cum,enum machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)1078 rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
1079 			 const_tree type, bool named ATTRIBUTE_UNUSED)
1080 {
1081   *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
1082 }
1083 
1084 static unsigned int
rx_function_arg_boundary(enum machine_mode mode ATTRIBUTE_UNUSED,const_tree type ATTRIBUTE_UNUSED)1085 rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
1086 			  const_tree type ATTRIBUTE_UNUSED)
1087 {
1088   /* Older versions of the RX backend aligned all on-stack arguements
1089      to 32-bits.  The RX C ABI however says that they should be
1090      aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
1091   if (TARGET_GCC_ABI)
1092     return STACK_BOUNDARY;
1093 
1094   if (type)
1095     {
1096       if (DECL_P (type))
1097 	return DECL_ALIGN (type);
1098       return TYPE_ALIGN (type);
1099     }
1100 
1101   return PARM_BOUNDARY;
1102 }
1103 
1104 /* Return an RTL describing where a function return value of type RET_TYPE
1105    is held.  */
1106 
1107 static rtx
rx_function_value(const_tree ret_type,const_tree fn_decl_or_type ATTRIBUTE_UNUSED,bool outgoing ATTRIBUTE_UNUSED)1108 rx_function_value (const_tree ret_type,
1109 		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1110 		   bool       outgoing ATTRIBUTE_UNUSED)
1111 {
1112   enum machine_mode mode = TYPE_MODE (ret_type);
1113 
1114   /* RX ABI specifies that small integer types are
1115      promoted to int when returned by a function.  */
1116   if (GET_MODE_SIZE (mode) > 0
1117       && GET_MODE_SIZE (mode) < 4
1118       && ! COMPLEX_MODE_P (mode)
1119       )
1120     return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1121 
1122   return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1123 }
1124 
1125 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1126    regard to function returns as does TARGET_FUNCTION_VALUE.  */
1127 
1128 static enum machine_mode
rx_promote_function_mode(const_tree type ATTRIBUTE_UNUSED,enum machine_mode mode,int * punsignedp ATTRIBUTE_UNUSED,const_tree funtype ATTRIBUTE_UNUSED,int for_return)1129 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1130 			  enum machine_mode mode,
1131 			  int * punsignedp ATTRIBUTE_UNUSED,
1132 			  const_tree funtype ATTRIBUTE_UNUSED,
1133 			  int for_return)
1134 {
1135   if (for_return != 1
1136       || GET_MODE_SIZE (mode) >= 4
1137       || COMPLEX_MODE_P (mode)
1138       || GET_MODE_SIZE (mode) < 1)
1139     return mode;
1140 
1141   return SImode;
1142 }
1143 
1144 static bool
rx_return_in_memory(const_tree type,const_tree fntype ATTRIBUTE_UNUSED)1145 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1146 {
1147   HOST_WIDE_INT size;
1148 
1149   if (TYPE_MODE (type) != BLKmode
1150       && ! AGGREGATE_TYPE_P (type))
1151     return false;
1152 
1153   size = int_size_in_bytes (type);
1154   /* Large structs and those whose size is not an
1155      exact multiple of 4 are returned in memory.  */
1156   return size < 1
1157     || size > 16
1158     || (size % UNITS_PER_WORD) != 0;
1159 }
1160 
/* Return the register used to pass the address of an aggregate
   return value.  On RX this is always STRUCT_VAL_REGNUM; both
   arguments are ignored.  */

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1167 
1168 static bool
rx_return_in_msb(const_tree valtype)1169 rx_return_in_msb (const_tree valtype)
1170 {
1171   return TARGET_BIG_ENDIAN_DATA
1172     && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1173 }
1174 
1175 /* Returns true if the provided function has the specified attribute.  */
1176 
1177 static inline bool
has_func_attr(const_tree decl,const char * func_attr)1178 has_func_attr (const_tree decl, const char * func_attr)
1179 {
1180   if (decl == NULL_TREE)
1181     decl = current_function_decl;
1182 
1183   return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1184 }
1185 
1186 /* Returns true if the provided function has the "fast_interrupt" attribute.  */
1187 
static inline bool
is_fast_interrupt_func (const_tree decl)
{
  /* NULL_TREE queries the current function (see has_func_attr).  */
  return has_func_attr (decl, "fast_interrupt");
}
1193 
1194 /* Returns true if the provided function has the "interrupt" attribute.  */
1195 
static inline bool
is_interrupt_func (const_tree decl)
{
  /* NULL_TREE queries the current function (see has_func_attr).  */
  return has_func_attr (decl, "interrupt");
}
1201 
1202 /* Returns true if the provided function has the "naked" attribute.  */
1203 
static inline bool
is_naked_func (const_tree decl)
{
  /* NULL_TREE queries the current function (see has_func_attr).  */
  return has_func_attr (decl, "naked");
}
1209 
1210 static bool use_fixed_regs = false;
1211 
/* Adjust the fixed and call-used register sets to match the current
   target options: reserve base registers for PID and small-data
   addressing, and for fast interrupt handlers (signalled via the
   file-scope flag use_fixed_regs) free up r10-r13 as scratch
   registers.  */

static void
rx_conditional_register_usage (void)
{
  /* Whether the fast-interrupt register set is currently installed;
     the masks are only swapped when this state changes.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      /* Reserve a register to serve as the PID base register.  */
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* Reserve a register for the small-data base pointer; if PID is
	 also in use it sits just below the PID base register.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  /* Save the masks so that they can be restored when we switch
	     back to compiling a non-fast-interrupt function.  */
	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1270 
/* Singly-linked list node recording one function declaration.  Used
   by add_warned_decl / already_warned to remember which functions a
   diagnostic has already been issued for.  */
struct decl_chain
{
  tree fndecl;			/* The function declaration.  */
  struct decl_chain * next;	/* Next node, or NULL.  */
};
1276 
1277 /* Stack of decls for which we have issued warnings.  */
1278 static struct decl_chain * warned_decls = NULL;
1279 
1280 static void
add_warned_decl(tree fndecl)1281 add_warned_decl (tree fndecl)
1282 {
1283   struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1284 
1285   warned->fndecl = fndecl;
1286   warned->next = warned_decls;
1287   warned_decls = warned;
1288 }
1289 
1290 /* Returns TRUE if FNDECL is on our list of warned about decls.  */
1291 
1292 static bool
already_warned(tree fndecl)1293 already_warned (tree fndecl)
1294 {
1295   struct decl_chain * warned;
1296 
1297   for (warned = warned_decls;
1298        warned != NULL;
1299        warned = warned->next)
1300     if (warned->fndecl == fndecl)
1301       return true;
1302 
1303   return false;
1304 }
1305 
1306 /* Perform any actions necessary before starting to compile FNDECL.
1307    For the RX we use this to make sure that we have the correct
1308    set of register masks selected.  If FNDECL is NULL then we are
1309    compiling top level things.  */
1310 
static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  /* A NULL fndecl (top level code) never counts as a fast interrupt.  */
  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      /* Flip the register masks for fast interrupt handlers.
	 use_fixed_regs is consumed by rx_conditional_register_usage,
	 which runs again as part of target_reinit.  */
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1358 
1359 /* Typical stack layout should looks like this after the function's prologue:
1360 
1361                             |    |
1362                               --                       ^
1363                             |    | \                   |
1364                             |    |   arguments saved   | Increasing
1365                             |    |   on the stack      |  addresses
1366     PARENT   arg pointer -> |    | /
1367   -------------------------- ---- -------------------
1368     CHILD                   |ret |   return address
1369                               --
1370                             |    | \
1371                             |    |   call saved
1372                             |    |   registers
1373 			    |    | /
1374                               --
1375                             |    | \
1376                             |    |   local
1377                             |    |   variables
1378         frame pointer ->    |    | /
1379                               --
1380                             |    | \
1381                             |    |   outgoing          | Decreasing
1382                             |    |   arguments         |  addresses
1383    current stack pointer -> |    | /                   |
1384   -------------------------- ---- ------------------   V
1385                             |    |                 */
1386 
/* Return the number of set bits in X.  */

static unsigned int
bit_count (unsigned int x)
{
  unsigned int count;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  for (count = 0; x != 0; count++)
    x &= x - 1;

  return count;
}
1401 
1402 #define MUST_SAVE_ACC_REGISTER			\
1403   (TARGET_SAVE_ACC_REGISTER			\
1404    && (is_interrupt_func (NULL_TREE)		\
1405        || is_fast_interrupt_func (NULL_TREE)))
1406 
1407 /* Returns either the lowest numbered and highest numbered registers that
1408    occupy the call-saved area of the stack frame, if the registers are
1409    stored as a contiguous block, or else a bitmask of the individual
1410    registers if they are stored piecemeal.
1411 
1412    Also computes the size of the frame and the size of the outgoing
1413    arguments block (in bytes).  */
1414 
1415 static void
rx_get_stack_layout(unsigned int * lowest,unsigned int * highest,unsigned int * register_mask,unsigned int * frame_size,unsigned int * stack_size)1416 rx_get_stack_layout (unsigned int * lowest,
1417 		     unsigned int * highest,
1418 		     unsigned int * register_mask,
1419 		     unsigned int * frame_size,
1420 		     unsigned int * stack_size)
1421 {
1422   unsigned int reg;
1423   unsigned int low;
1424   unsigned int high;
1425   unsigned int fixed_reg = 0;
1426   unsigned int save_mask;
1427   unsigned int pushed_mask;
1428   unsigned int unneeded_pushes;
1429 
1430   if (is_naked_func (NULL_TREE))
1431     {
1432       /* Naked functions do not create their own stack frame.
1433 	 Instead the programmer must do that for us.  */
1434       * lowest = 0;
1435       * highest = 0;
1436       * register_mask = 0;
1437       * frame_size = 0;
1438       * stack_size = 0;
1439       return;
1440     }
1441 
1442   for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1443     {
1444       if ((df_regs_ever_live_p (reg)
1445 	   /* Always save all call clobbered registers inside non-leaf
1446 	      interrupt handlers, even if they are not live - they may
1447 	      be used in (non-interrupt aware) routines called from this one.  */
1448 	   || (call_used_regs[reg]
1449 	       && is_interrupt_func (NULL_TREE)
1450 	       && ! crtl->is_leaf))
1451 	  && (! call_used_regs[reg]
1452 	      /* Even call clobbered registered must
1453 		 be pushed inside interrupt handlers.  */
1454 	      || is_interrupt_func (NULL_TREE)
1455 	      /* Likewise for fast interrupt handlers, except registers r10 -
1456 		 r13.  These are normally call-saved, but may have been set
1457 		 to call-used by rx_conditional_register_usage.  If so then
1458 		 they can be used in the fast interrupt handler without
1459 		 saving them on the stack.  */
1460 	      || (is_fast_interrupt_func (NULL_TREE)
1461 		  && ! IN_RANGE (reg, 10, 13))))
1462 	{
1463 	  if (low == 0)
1464 	    low = reg;
1465 	  high = reg;
1466 
1467 	  save_mask |= 1 << reg;
1468 	}
1469 
1470       /* Remember if we see a fixed register
1471 	 after having found the low register.  */
1472       if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1473 	fixed_reg = reg;
1474     }
1475 
1476   /* If we have to save the accumulator register, make sure
1477      that at least two registers are pushed into the frame.  */
1478   if (MUST_SAVE_ACC_REGISTER
1479       && bit_count (save_mask) < 2)
1480     {
1481       save_mask |= (1 << 13) | (1 << 14);
1482       if (low == 0)
1483 	low = 13;
1484       if (high == 0 || low == high)
1485 	high = low + 1;
1486     }
1487 
1488   /* Decide if it would be faster fill in the call-saved area of the stack
1489      frame using multiple PUSH instructions instead of a single PUSHM
1490      instruction.
1491 
1492      SAVE_MASK is a bitmask of the registers that must be stored in the
1493      call-save area.  PUSHED_MASK is a bitmask of the registers that would
1494      be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
1495      is a bitmask of those registers in pushed_mask that are not in
1496      save_mask.
1497 
1498      We use a simple heuristic that says that it is better to use
1499      multiple PUSH instructions if the number of unnecessary pushes is
1500      greater than the number of necessary pushes.
1501 
1502      We also use multiple PUSH instructions if there are any fixed registers
1503      between LOW and HIGH.  The only way that this can happen is if the user
1504      has specified --fixed-<reg-name> on the command line and in such
1505      circumstances we do not want to touch the fixed registers at all.
1506 
1507      FIXME: Is it worth improving this heuristic ?  */
1508   pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1509   unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1510 
1511   if ((fixed_reg && fixed_reg <= high)
1512       || (optimize_function_for_speed_p (cfun)
1513 	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
1514     {
1515       /* Use multiple pushes.  */
1516       * lowest = 0;
1517       * highest = 0;
1518       * register_mask = save_mask;
1519     }
1520   else
1521     {
1522       /* Use one push multiple instruction.  */
1523       * lowest = low;
1524       * highest = high;
1525       * register_mask = 0;
1526     }
1527 
1528   * frame_size = rx_round_up
1529     (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1530 
1531   if (crtl->args.size > 0)
1532     * frame_size += rx_round_up
1533       (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1534 
1535   * stack_size = rx_round_up
1536     (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1537 }
1538 
1539 /* Generate a PUSHM instruction that matches the given operands.  */
1540 
void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  /* operands[0] is the number of bytes pushed - one word per register -
     so LAST_REG is the distance from the highest to the lowest register
     in the block.  */
  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  /* operands[1] is the PARALLEL built by gen_rx_store_vector: element 0
     adjusts the stack pointer and element 1 stores the highest-numbered
     register, so FIRST_PUSH ends up as that register.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* PUSHM names an ascending register range, so step back LAST_REG
     registers from the highest one for the low end of the range.  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1560 
1561 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */
1562 
static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per saved register (HIGH - LOW + 1) plus the stack
     pointer adjustment.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: decrement the stack pointer by the size of the block.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  /* Elements 1..count-1: store registers HIGH down to LOW at
     successively lower addresses below the (old) stack pointer.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1586 
1587 /* Mark INSN as being frame related.  If it is a PARALLEL
1588    then mark each element as being frame related as well.  */
1589 
1590 static void
mark_frame_related(rtx insn)1591 mark_frame_related (rtx insn)
1592 {
1593   RTX_FRAME_RELATED_P (insn) = 1;
1594   insn = PATTERN (insn);
1595 
1596   if (GET_CODE (insn) == PARALLEL)
1597     {
1598       unsigned int i;
1599 
1600       for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1601 	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1602     }
1603 }
1604 
1605 static bool
ok_for_max_constant(HOST_WIDE_INT val)1606 ok_for_max_constant (HOST_WIDE_INT val)
1607 {
1608   if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
1609     /* If there is no constraint on the size of constants
1610        used as operands, then any value is legitimate.  */
1611     return true;
1612 
1613   /* rx_max_constant_size specifies the maximum number
1614      of bytes that can be used to hold a signed value.  */
1615   return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1616 		        ( 1 << (rx_max_constant_size * 8)));
1617 }
1618 
1619 /* Generate an ADD of SRC plus VAL into DEST.
1620    Handles the case where VAL is too big for max_constant_value.
1621    Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */
1622 
static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* Adding zero degenerates into a plain register copy.  A copy
	 onto itself would be pointless, hence the assertion.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    /* The constant fits within -mmax-constant-size, so a plain
       add instruction can be used.  */
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (SImode, dest,
				   gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
  return;
}
1657 
/* Expand the prologue for the current function: save call-saved
   registers, preserve the accumulator if required, set up the frame
   pointer and allocate the local frame and outgoing-argument area.  */

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;
  rtx insn;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  /* MASK non-zero means save a piecemeal set of registers with
     individual pushes; otherwise LOW..HIGH describes a contiguous
     block for a single PUSHM.  */
  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
	    mark_frame_related (insn);
	  }
    }
  else if (low)
    {
      if (high == low)
	insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
      else
	/* The first operand is the total number of bytes pushed.  */
	insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						    * UNITS_PER_WORD),
					   gen_rx_store_vector (low, high)));
      mark_frame_related (insn);
    }

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
         two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest-numbered saved registers.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	/* The frame pointer already points at the frame bottom, so the
	   stack pointer can simply be copied from it.  */
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      true);
    }
}
1781 
/* Emit informational comments about the current function into the
   assembler output.  These notes have no effect on code generation.  */

static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1802 
1803 /* Generate a POPM or RTSD instruction that matches the given operands.  */
1804 
void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  /* operands[0] is the stack adjustment in bytes (only printed for RTSD).  */
  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  /* operands[1] is the PARALLEL built by gen_rx_popm_vector or
     gen_rx_rtsd_vector.  Both start with a stack pointer adjustment,
     and the RTSD vector additionally ends with a return rtx - hence
     the differing offsets when computing the register count.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  /* Element 1 loads the lowest-numbered register of the block.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1833 
1834 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */
1835 
static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* The vector holds (HIGH - LOW + 1) register loads plus the stack
     pointer adjustment and the return rtx, hence a bias of 3.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: pop the whole frame by adding ADJUST to the stack pointer.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  /* Elements 1..count-2: reload registers LOW upwards from
     successively higher stack slots.  */
  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (VOIDmode,
		   gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  /* Final element: the return itself.  */
  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1863 
1864 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */
1865 
1866 static rtx
gen_rx_popm_vector(unsigned int low,unsigned int high)1867 gen_rx_popm_vector (unsigned int low, unsigned int high)
1868 {
1869   unsigned int i;
1870   unsigned int count = (high - low) + 2;
1871   rtx vector;
1872 
1873   vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1874 
1875   XVECEXP (vector, 0, 0) =
1876     gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1877 		 plus_constant (Pmode, stack_pointer_rtx,
1878 				(count - 1) * UNITS_PER_WORD));
1879 
1880   for (i = 0; i < count - 1; i++)
1881     XVECEXP (vector, 0, i + 1) =
1882       gen_rtx_SET (VOIDmode,
1883 		   gen_rtx_REG (SImode, low + i),
1884 		   gen_rtx_MEM (SImode,
1885 				i == 0 ? stack_pointer_rtx
1886 				: plus_constant (Pmode, stack_pointer_rtx,
1887 						 i * UNITS_PER_WORD)));
1888 
1889   return vector;
1890 }
1891 
1892 /* Returns true if a simple return insn can be used.  */
1893 
1894 bool
rx_can_use_simple_return(void)1895 rx_can_use_simple_return (void)
1896 {
1897   unsigned int low;
1898   unsigned int high;
1899   unsigned int frame_size;
1900   unsigned int stack_size;
1901   unsigned int register_mask;
1902 
1903   if (is_naked_func (NULL_TREE)
1904       || is_fast_interrupt_func (NULL_TREE)
1905       || is_interrupt_func (NULL_TREE))
1906     return false;
1907 
1908   rx_get_stack_layout (& low, & high, & register_mask,
1909 		       & frame_size, & stack_size);
1910 
1911   return (register_mask == 0
1912 	  && (frame_size + stack_size) == 0
1913 	  && low == 0);
1914 }
1915 
1916 void
rx_expand_epilogue(bool is_sibcall)1917 rx_expand_epilogue (bool is_sibcall)
1918 {
1919   unsigned int low;
1920   unsigned int high;
1921   unsigned int frame_size;
1922   unsigned int stack_size;
1923   unsigned int register_mask;
1924   unsigned int regs_size;
1925   unsigned int reg;
1926   unsigned HOST_WIDE_INT total_size;
1927 
  /* FIXME: We do not support indirect sibcalls at the moment because we
1929      cannot guarantee that the register holding the function address is a
1930      call-used register.  If it is a call-saved register then the stack
1931      pop instructions generated in the epilogue will corrupt the address
1932      before it is used.
1933 
1934      Creating a new call-used-only register class works but then the
1935      reload pass gets stuck because it cannot always find a call-used
1936      register for spilling sibcalls.
1937 
1938      The other possible solution is for this pass to scan forward for the
1939      sibcall instruction (if it has been generated) and work out if it
1940      is an indirect sibcall using a call-saved register.  If it is then
1941      the address can copied into a call-used register in this epilogue
1942      code and the sibcall instruction modified to use that register.  */
1943 
1944   if (is_naked_func (NULL_TREE))
1945     {
1946       gcc_assert (! is_sibcall);
1947 
1948       /* Naked functions use their own, programmer provided epilogues.
1949 	 But, in order to keep gcc happy we have to generate some kind of
1950 	 epilogue RTL.  */
1951       emit_jump_insn (gen_naked_return ());
1952       return;
1953     }
1954 
1955   rx_get_stack_layout (& low, & high, & register_mask,
1956 		       & frame_size, & stack_size);
1957 
1958   total_size = frame_size + stack_size;
1959   regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1960 
1961   /* See if we are unable to use the special stack frame deconstruct and
1962      return instructions.  In most cases we can use them, but the exceptions
1963      are:
1964 
1965      - Sibling calling functions deconstruct the frame but do not return to
1966        their caller.  Instead they branch to their sibling and allow their
1967        return instruction to return to this function's parent.
1968 
1969      - Fast and normal interrupt handling functions have to use special
1970        return instructions.
1971 
1972      - Functions where we have pushed a fragmented set of registers into the
1973        call-save area must have the same set of registers popped.  */
1974   if (is_sibcall
1975       || is_fast_interrupt_func (NULL_TREE)
1976       || is_interrupt_func (NULL_TREE)
1977       || register_mask)
1978     {
1979       /* Cannot use the special instructions - deconstruct by hand.  */
1980       if (total_size)
1981 	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1982 		      GEN_INT (total_size), false);
1983 
1984       if (MUST_SAVE_ACC_REGISTER)
1985 	{
1986 	  unsigned int acc_low, acc_high;
1987 
1988 	  /* Reverse the saving of the accumulator register onto the stack.
1989 	     Note we must adjust the saved "low" accumulator value as it
1990 	     is really the middle 32-bits of the accumulator.  */
1991 	  if (register_mask)
1992 	    {
1993 	      acc_low = acc_high = 0;
1994 
1995 	      for (reg = 1; reg < CC_REGNUM; reg ++)
1996 		if (register_mask & (1 << reg))
1997 		  {
1998 		    if (acc_low == 0)
1999 		      acc_low = reg;
2000 		    else
2001 		      {
2002 			acc_high = reg;
2003 			break;
2004 		      }
2005 		  }
2006 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2007 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2008 	    }
2009 	  else
2010 	    {
2011 	      acc_low = low;
2012 	      acc_high = low + 1;
2013 	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2014 					 gen_rx_popm_vector (acc_low, acc_high)));
2015 	    }
2016 
2017 	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2018 				  gen_rtx_REG (SImode, acc_low),
2019 				  GEN_INT (16)));
2020 	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2021 	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2022 	}
2023 
2024       if (register_mask)
2025 	{
2026 	  for (reg = 0; reg < CC_REGNUM; reg ++)
2027 	    if (register_mask & (1 << reg))
2028 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
2029 	}
2030       else if (low)
2031 	{
2032 	  if (high == low)
2033 	    emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2034 	  else
2035 	    emit_insn (gen_stack_popm (GEN_INT (regs_size),
2036 				       gen_rx_popm_vector (low, high)));
2037 	}
2038 
2039       if (is_fast_interrupt_func (NULL_TREE))
2040 	{
2041 	  gcc_assert (! is_sibcall);
2042 	  emit_jump_insn (gen_fast_interrupt_return ());
2043 	}
2044       else if (is_interrupt_func (NULL_TREE))
2045 	{
2046 	  gcc_assert (! is_sibcall);
2047 	  emit_jump_insn (gen_exception_return ());
2048 	}
2049       else if (! is_sibcall)
2050 	emit_jump_insn (gen_simple_return ());
2051 
2052       return;
2053     }
2054 
2055   /* If we allocated space on the stack, free it now.  */
2056   if (total_size)
2057     {
2058       unsigned HOST_WIDE_INT rtsd_size;
2059 
2060       /* See if we can use the RTSD instruction.  */
2061       rtsd_size = total_size + regs_size;
2062       if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2063 	{
2064 	  if (low)
2065 	    emit_jump_insn (gen_pop_and_return
2066 			    (GEN_INT (rtsd_size),
2067 			     gen_rx_rtsd_vector (rtsd_size, low, high)));
2068 	  else
2069 	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2070 
2071 	  return;
2072 	}
2073 
2074       gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2075 		    GEN_INT (total_size), false);
2076     }
2077 
2078   if (low)
2079     emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2080 					gen_rx_rtsd_vector (regs_size,
2081 							    low, high)));
2082   else
2083     emit_jump_insn (gen_simple_return ());
2084 }
2085 
2086 
2087 /* Compute the offset (in words) between FROM (arg pointer
2088    or frame pointer) and TO (frame pointer or stack pointer).
2089    See ASCII art comment at the start of rx_expand_prologue
2090    for more information.  */
2091 
2092 int
rx_initial_elimination_offset(int from,int to)2093 rx_initial_elimination_offset (int from, int to)
2094 {
2095   unsigned int low;
2096   unsigned int high;
2097   unsigned int frame_size;
2098   unsigned int stack_size;
2099   unsigned int mask;
2100 
2101   rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2102 
2103   if (from == ARG_POINTER_REGNUM)
2104     {
2105       /* Extend the computed size of the stack frame to
2106 	 include the registers pushed in the prologue.  */
2107       if (low)
2108 	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2109       else
2110 	frame_size += bit_count (mask) * UNITS_PER_WORD;
2111 
2112       /* Remember to include the return address.  */
2113       frame_size += 1 * UNITS_PER_WORD;
2114 
2115       if (to == FRAME_POINTER_REGNUM)
2116 	return frame_size;
2117 
2118       gcc_assert (to == STACK_POINTER_REGNUM);
2119       return frame_size + stack_size;
2120     }
2121 
2122   gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2123   return stack_size;
2124 }
2125 
2126 /* Decide if a variable should go into one of the small data sections.  */
2127 
2128 static bool
rx_in_small_data(const_tree decl)2129 rx_in_small_data (const_tree decl)
2130 {
2131   int size;
2132   const_tree section;
2133 
2134   if (rx_small_data_limit == 0)
2135     return false;
2136 
2137   if (TREE_CODE (decl) != VAR_DECL)
2138     return false;
2139 
2140   /* We do not put read-only variables into a small data area because
2141      they would be placed with the other read-only sections, far away
2142      from the read-write data sections, and we only have one small
2143      data area pointer.
2144      Similarly commons are placed in the .bss section which might be
2145      far away (and out of alignment with respect to) the .data section.  */
2146   if (TREE_READONLY (decl) || DECL_COMMON (decl))
2147     return false;
2148 
2149   section = DECL_SECTION_NAME (decl);
2150   if (section)
2151     {
2152       const char * const name = TREE_STRING_POINTER (section);
2153 
2154       return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
2155     }
2156 
2157   size = int_size_in_bytes (TREE_TYPE (decl));
2158 
2159   return (size > 0) && (size <= rx_small_data_limit);
2160 }
2161 
2162 /* Return a section for X.
2163    The only special thing we do here is to honor small data.  */
2164 
2165 static section *
rx_select_rtx_section(enum machine_mode mode,rtx x,unsigned HOST_WIDE_INT align)2166 rx_select_rtx_section (enum machine_mode mode,
2167 		       rtx x,
2168 		       unsigned HOST_WIDE_INT align)
2169 {
2170   if (rx_small_data_limit > 0
2171       && GET_MODE_SIZE (mode) <= rx_small_data_limit
2172       && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2173     return sdata_section;
2174 
2175   return default_elf_select_rtx_section (mode, x, align);
2176 }
2177 
2178 static section *
rx_select_section(tree decl,int reloc,unsigned HOST_WIDE_INT align)2179 rx_select_section (tree decl,
2180 		   int reloc,
2181 		   unsigned HOST_WIDE_INT align)
2182 {
2183   if (rx_small_data_limit > 0)
2184     {
2185       switch (categorize_decl_for_section (decl, reloc))
2186 	{
2187 	case SECCAT_SDATA:	return sdata_section;
2188 	case SECCAT_SBSS:	return sbss_section;
2189 	case SECCAT_SRODATA:
2190 	  /* Fall through.  We do not put small, read only
2191 	     data into the C_2 section because we are not
2192 	     using the C_2 section.  We do not use the C_2
2193 	     section because it is located with the other
2194 	     read-only data sections, far away from the read-write
2195 	     data sections and we only have one small data
2196 	     pointer (r13).  */
2197 	default:
2198 	  break;
2199 	}
2200     }
2201 
2202   /* If we are supporting the Renesas assembler
2203      we cannot use mergeable sections.  */
2204   if (TARGET_AS100_SYNTAX)
2205     switch (categorize_decl_for_section (decl, reloc))
2206       {
2207       case SECCAT_RODATA_MERGE_CONST:
2208       case SECCAT_RODATA_MERGE_STR_INIT:
2209       case SECCAT_RODATA_MERGE_STR:
2210 	return readonly_data_section;
2211 
2212       default:
2213 	break;
2214       }
2215 
2216   return default_elf_select_section (decl, reloc, align);
2217 }
2218 
/* Codes identifying the RX machine specific builtin functions.  */

enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max        /* Number of builtins; must remain last.  */
};

/* Builtin function declarations, indexed by rx_builtin code.
   Filled in by rx_init_builtins; GTY so the GC roots them.  */
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2245 
/* Register the RX machine specific "__builtin_rx_*" functions and
   record their decls in rx_builtins[].  The ADD_RX_BUILTINn helper
   macros register a builtin with n arguments; type arguments name
   standard type nodes with the "_type_node" suffix stripped.  */

static void
rx_init_builtins (void)
{
#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  ARG_TYPE3##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

  ADD_RX_BUILTIN1 (BRK,     "brk",     void,  void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
  ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN1 (RMPA,    "rmpa",    void,  void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN1 (WAIT,    "wait",    void,  void);
}
2300 
2301 /* Return the RX builtin for CODE.  */
2302 
2303 static tree
rx_builtin_decl(unsigned code,bool initialize_p ATTRIBUTE_UNUSED)2304 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2305 {
2306   if (code >= RX_BUILTIN_max)
2307     return error_mark_node;
2308 
2309   return rx_builtins[code];
2310 }
2311 
2312 static rtx
rx_expand_void_builtin_1_arg(rtx arg,rtx (* gen_func)(rtx),bool reg)2313 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2314 {
2315   if (reg && ! REG_P (arg))
2316     arg = force_reg (SImode, arg);
2317 
2318   emit_insn (gen_func (arg));
2319 
2320   return NULL_RTX;
2321 }
2322 
2323 static rtx
rx_expand_builtin_mvtc(tree exp)2324 rx_expand_builtin_mvtc (tree exp)
2325 {
2326   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2327   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2328 
2329   if (! CONST_INT_P (arg1))
2330     return NULL_RTX;
2331 
2332   if (! REG_P (arg2))
2333     arg2 = force_reg (SImode, arg2);
2334 
2335   emit_insn (gen_mvtc (arg1, arg2));
2336 
2337   return NULL_RTX;
2338 }
2339 
2340 static rtx
rx_expand_builtin_mvfc(tree t_arg,rtx target)2341 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2342 {
2343   rtx arg = expand_normal (t_arg);
2344 
2345   if (! CONST_INT_P (arg))
2346     return NULL_RTX;
2347 
2348   if (target == NULL_RTX)
2349     return NULL_RTX;
2350 
2351   if (! REG_P (target))
2352     target = force_reg (SImode, target);
2353 
2354   emit_insn (gen_mvfc (target, arg));
2355 
2356   return target;
2357 }
2358 
2359 static rtx
rx_expand_builtin_mvtipl(rtx arg)2360 rx_expand_builtin_mvtipl (rtx arg)
2361 {
2362   /* The RX610 does not support the MVTIPL instruction.  */
2363   if (rx_cpu_type == RX610)
2364     return NULL_RTX;
2365 
2366   if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2367     return NULL_RTX;
2368 
2369   emit_insn (gen_mvtipl (arg));
2370 
2371   return NULL_RTX;
2372 }
2373 
2374 static rtx
rx_expand_builtin_mac(tree exp,rtx (* gen_func)(rtx,rtx))2375 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2376 {
2377   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2378   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2379 
2380   if (! REG_P (arg1))
2381     arg1 = force_reg (SImode, arg1);
2382 
2383   if (! REG_P (arg2))
2384     arg2 = force_reg (SImode, arg2);
2385 
2386   emit_insn (gen_func (arg1, arg2));
2387 
2388   return NULL_RTX;
2389 }
2390 
2391 static rtx
rx_expand_int_builtin_1_arg(rtx arg,rtx target,rtx (* gen_func)(rtx,rtx),bool mem_ok)2392 rx_expand_int_builtin_1_arg (rtx arg,
2393 			     rtx target,
2394 			     rtx (* gen_func)(rtx, rtx),
2395 			     bool mem_ok)
2396 {
2397   if (! REG_P (arg))
2398     if (!mem_ok || ! MEM_P (arg))
2399       arg = force_reg (SImode, arg);
2400 
2401   if (target == NULL_RTX || ! REG_P (target))
2402     target = gen_reg_rtx (SImode);
2403 
2404   emit_insn (gen_func (target, arg));
2405 
2406   return target;
2407 }
2408 
2409 static rtx
rx_expand_int_builtin_0_arg(rtx target,rtx (* gen_func)(rtx))2410 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2411 {
2412   if (target == NULL_RTX || ! REG_P (target))
2413     target = gen_reg_rtx (SImode);
2414 
2415   emit_insn (gen_func (target));
2416 
2417   return target;
2418 }
2419 
2420 static rtx
rx_expand_builtin_round(rtx arg,rtx target)2421 rx_expand_builtin_round (rtx arg, rtx target)
2422 {
2423   if ((! REG_P (arg) && ! MEM_P (arg))
2424       || GET_MODE (arg) != SFmode)
2425     arg = force_reg (SFmode, arg);
2426 
2427   if (target == NULL_RTX || ! REG_P (target))
2428     target = gen_reg_rtx (SImode);
2429 
2430   emit_insn (gen_lrintsf2 (target, arg));
2431 
2432   return target;
2433 }
2434 
2435 static int
valid_psw_flag(rtx op,const char * which)2436 valid_psw_flag (rtx op, const char *which)
2437 {
2438   static int mvtc_inform_done = 0;
2439 
2440   if (GET_CODE (op) == CONST_INT)
2441     switch (INTVAL (op))
2442       {
2443       case 0: case 'c': case 'C':
2444       case 1: case 'z': case 'Z':
2445       case 2: case 's': case 'S':
2446       case 3: case 'o': case 'O':
2447       case 8: case 'i': case 'I':
2448       case 9: case 'u': case 'U':
2449 	return 1;
2450       }
2451 
2452   error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2453   if (!mvtc_inform_done)
2454     error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2455   mvtc_inform_done = 1;
2456 
2457   return 0;
2458 }
2459 
/* Expand a call to one of the RX machine specific builtins registered
   in rx_init_builtins.  EXP is the CALL_EXPR; TARGET, if non-null, is
   a suggested place for the result.  Returns the result rtx, or
   NULL_RTX for void builtins or when expansion is abandoned.  */

static rtx
rx_expand_builtin (tree exp,
		   rtx target,
		   rtx subtarget ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  /* Pre-expand the first argument, if any - most of the builtins
     below take at most one operand.  */
  tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
	(op, gen_int, false);
    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
	(op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
	(op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA:    emit_insn (gen_rmpa ()); return NULL_RTX;
    /* MVFC takes the original tree argument, not the expanded rtx,
       because it must verify the operand is a constant.  */
    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
	(op, gen_racw, false);
    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
	(op, target, gen_revw, false);
    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
2515 
/* Place an element into a constructor or destructor section.
   Like default_ctor_section_asm_out_constructor in varasm.c
   except that it uses .init_array (or .fini_array) and it
   handles constructor priorities.  */

static void
rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
{
  section * s;

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      /* 11 chars of section name + '.' + 5 digits + NUL = 18.
	 NOTE(review): %.5u prints a minimum of 5 digits; this assumes
	 PRIORITY never exceeds 99999 - confirm against the init
	 priority limits before changing the buffer size.  */
      char buf[18];

      sprintf (buf, "%s.%.5u",
	       is_ctor ? ".init_array" : ".fini_array",
	       priority);
      s = get_section (buf, SECTION_WRITE, NULL_TREE);
    }
  else if (is_ctor)
    s = ctors_section;
  else
    s = dtors_section;

  /* Emit the pointer-sized, pointer-aligned entry for SYMBOL.  */
  switch_to_section (s);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
2544 
2545 static void
rx_elf_asm_constructor(rtx symbol,int priority)2546 rx_elf_asm_constructor (rtx symbol, int priority)
2547 {
2548   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2549 }
2550 
2551 static void
rx_elf_asm_destructor(rtx symbol,int priority)2552 rx_elf_asm_destructor (rtx symbol, int priority)
2553 {
2554   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2555 }
2556 
2557 /* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2558 
2559 static tree
rx_handle_func_attribute(tree * node,tree name,tree args,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2560 rx_handle_func_attribute (tree * node,
2561 			  tree   name,
2562 			  tree   args,
2563 			  int    flags ATTRIBUTE_UNUSED,
2564 			  bool * no_add_attrs)
2565 {
2566   gcc_assert (DECL_P (* node));
2567   gcc_assert (args == NULL_TREE);
2568 
2569   if (TREE_CODE (* node) != FUNCTION_DECL)
2570     {
2571       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2572 	       name);
2573       * no_add_attrs = true;
2574     }
2575 
2576   /* FIXME: We ought to check for conflicting attributes.  */
2577 
2578   /* FIXME: We ought to check that the interrupt and exception
2579      handler attributes have been applied to void functions.  */
2580   return NULL_TREE;
2581 }
2582 
/* Table of RX specific attributes.  All three attributes share the
   same handler and require a decl (decl_req) with no arguments.  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "interrupt",      0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "naked",          0, 0, true, false, false, rx_handle_func_attribute,
    false },
  /* Terminating sentinel entry.  */
  { NULL,             0, 0, false, false, false, NULL, false }
};
2596 
2597 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
2598 
2599 static void
rx_override_options_after_change(void)2600 rx_override_options_after_change (void)
2601 {
2602   static bool first_time = TRUE;
2603 
2604   if (first_time)
2605     {
2606       /* If this is the first time through and the user has not disabled
2607 	 the use of RX FPU hardware then enable -ffinite-math-only,
2608 	 since the FPU instructions do not support NaNs and infinities.  */
2609       if (TARGET_USE_FPU)
2610 	flag_finite_math_only = 1;
2611 
2612       first_time = FALSE;
2613     }
2614   else
2615     {
2616       /* Alert the user if they are changing the optimization options
2617 	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2618       if (TARGET_USE_FPU
2619 	  && !flag_finite_math_only)
2620 	warning (0, "RX FPU instructions do not support NaNs and infinities");
2621     }
2622 }
2623 
/* Implement TARGET_OPTION_OVERRIDE.  Processes the deferred
   -mint-register=N option (reserving r13..r10 for interrupt handlers,
   highest value first, via deliberate switch fall-through), sets the
   target's bitfield and alignment defaults, and applies the FPU
   related option adjustments.  */

static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    /* Each case falls through so that -mint-register=N fixes
	       the top N of r13..r10 (r13 first, then r12, ...).  */
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		  returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* Unless optimizing for size, align branch targets to 2^3 bytes.  */
  if (align_jumps == 0 && ! optimize_size)
    align_jumps = 3;
  if (align_loops == 0 && ! optimize_size)
    align_loops = 3;
  if (align_labels == 0 && ! optimize_size)
    align_labels = 3;
}
2680 
2681 
2682 static bool
rx_allocate_stack_slots_for_args(void)2683 rx_allocate_stack_slots_for_args (void)
2684 {
2685   /* Naked functions should not allocate stack slots for arguments.  */
2686   return ! is_naked_func (NULL_TREE);
2687 }
2688 
2689 static bool
rx_func_attr_inlinable(const_tree decl)2690 rx_func_attr_inlinable (const_tree decl)
2691 {
2692   return ! is_fast_interrupt_func (decl)
2693     &&   ! is_interrupt_func (decl)
2694     &&   ! is_naked_func (decl);
2695 }
2696 
2697 static bool
rx_warn_func_return(tree decl)2698 rx_warn_func_return (tree decl)
2699 {
2700   /* Naked functions are implemented entirely in assembly, including the
2701      return sequence, so suppress warnings about this.  */
2702   return !is_naked_func (decl);
2703 }
2704 
2705 /* Return nonzero if it is ok to make a tail-call to DECL,
2706    a function_decl or NULL if this is an indirect call, using EXP  */
2707 
2708 static bool
rx_function_ok_for_sibcall(tree decl,tree exp ATTRIBUTE_UNUSED)2709 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2710 {
2711   /* Do not allow indirect tailcalls.  The
2712      sibcall patterns do not support them.  */
2713   if (decl == NULL)
2714     return false;
2715 
2716   /* Never tailcall from inside interrupt handlers or naked functions.  */
2717   if (is_fast_interrupt_func (NULL_TREE)
2718       || is_interrupt_func (NULL_TREE)
2719       || is_naked_func (NULL_TREE))
2720     return false;
2721 
2722   return true;
2723 }
2724 
2725 static void
rx_file_start(void)2726 rx_file_start (void)
2727 {
2728   if (! TARGET_AS100_SYNTAX)
2729     default_file_start ();
2730 }
2731 
2732 static bool
rx_is_ms_bitfield_layout(const_tree record_type ATTRIBUTE_UNUSED)2733 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2734 {
2735   /* The packed attribute overrides the MS behaviour.  */
2736   return ! TYPE_PACKED (record_type);
2737 }
2738 
/* Returns true if X a legitimate constant for an immediate
   operand on the RX.  X is already known to satisfy CONSTANT_P.  */

bool
rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      /* Look inside the CONST wrapper.  */
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	case UNSPEC:
	  /* Only the unspecs this backend itself wraps constants in.  */
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;
    case CONST_DOUBLE:
      /* Doubles are OK only if any constant size is allowed, or the
	 limit is exactly one word (4 bytes).  */
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
    case CONST_VECTOR:
      return false;
    default:
      gcc_assert (CONST_INT_P (x));
      break;
    }

  /* Plain integers must respect the -mmax-constant-size limit.  */
  return ok_for_max_constant (INTVAL (x));
}
2790 
2791 static int
rx_address_cost(rtx addr,enum machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed)2792 rx_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
2793 		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2794 {
2795   rtx a, b;
2796 
2797   if (GET_CODE (addr) != PLUS)
2798     return COSTS_N_INSNS (1);
2799 
2800   a = XEXP (addr, 0);
2801   b = XEXP (addr, 1);
2802 
2803   if (REG_P (a) && REG_P (b))
2804     /* Try to discourage REG+REG addressing as it keeps two registers live.  */
2805     return COSTS_N_INSNS (4);
2806 
2807   if (speed)
2808     /* [REG+OFF] is just as fast as [REG].  */
2809     return COSTS_N_INSNS (1);
2810 
2811   if (CONST_INT_P (b)
2812       && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2813     /* Try to discourage REG + <large OFF> when optimizing for size.  */
2814     return COSTS_N_INSNS (2);
2815 
2816   return COSTS_N_INSNS (1);
2817 }
2818 
2819 static bool
rx_can_eliminate(const int from ATTRIBUTE_UNUSED,const int to)2820 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2821 {
2822   /* We can always eliminate to the frame pointer.
2823      We can eliminate to the stack pointer unless a frame
2824      pointer is needed.  */
2825 
2826   return to == FRAME_POINTER_REGNUM
2827     || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2828 }
2829 
2830 
/* TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the constant skeleton of a
   trampoline; rx_trampoline_init later patches in the 0xdeadbeef
   placeholders.  */

static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like:

	   mov 		#<static chain value>, r8
	   mov          #<function's address>, r9
	   jmp		r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

           nop
	   nop
           mov.l	#<...>, r8
	   nop
	   nop
           mov.l	#<...>, r9
           jmp		r9
	   nop
	   nop             */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      /* Little-endian data: the assembler can emit the insns directly;
	 0deadbeefH marks the 32-bit slots patched by rx_trampoline_init.  */
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      /* Big-endian data: emit the byte-swapped, NOP-padded packets by
	 hand.  The register numbers are spliced into the opcode bytes.
	 NOTE(review): this assumes the chain/temp regnums are single
	 digits (< 10) - confirm if those registers ever change.  */
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
        {
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
        }
      else
        {
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
        }
    }
}
2892 
2893 static void
rx_trampoline_init(rtx tramp,tree fndecl,rtx chain)2894 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2895 {
2896   rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2897 
2898   emit_block_move (tramp, assemble_trampoline_template (),
2899 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2900 
2901   if (TARGET_BIG_ENDIAN_DATA)
2902     {
2903       emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2904       emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2905     }
2906   else
2907     {
2908       emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2909       emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2910     }
2911 }
2912 
2913 static int
rx_memory_move_cost(enum machine_mode mode ATTRIBUTE_UNUSED,reg_class_t regclass ATTRIBUTE_UNUSED,bool in)2914 rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2915 		     reg_class_t regclass ATTRIBUTE_UNUSED,
2916 		     bool in)
2917 {
2918   return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
2919 }
2920 
2921 /* Convert a CC_MODE to the set of flags that it represents.  */
2922 
2923 static unsigned int
flags_from_mode(enum machine_mode mode)2924 flags_from_mode (enum machine_mode mode)
2925 {
2926   switch (mode)
2927     {
2928     case CC_ZSmode:
2929       return CC_FLAG_S | CC_FLAG_Z;
2930     case CC_ZSOmode:
2931       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2932     case CC_ZSCmode:
2933       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2934     case CCmode:
2935       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2936     case CC_Fmode:
2937       return CC_FLAG_FP;
2938     default:
2939       gcc_unreachable ();
2940     }
2941 }
2942 
2943 /* Convert a set of flags to a CC_MODE that can implement it.  */
2944 
2945 static enum machine_mode
mode_from_flags(unsigned int f)2946 mode_from_flags (unsigned int f)
2947 {
2948   if (f & CC_FLAG_FP)
2949     return CC_Fmode;
2950   if (f & CC_FLAG_O)
2951     {
2952       if (f & CC_FLAG_C)
2953 	return CCmode;
2954       else
2955 	return CC_ZSOmode;
2956     }
2957   else if (f & CC_FLAG_C)
2958     return CC_ZSCmode;
2959   else
2960     return CC_ZSmode;
2961 }
2962 
2963 /* Convert an RTX_CODE to the set of flags needed to implement it.
2964    This assumes an integer comparison.  */
2965 
2966 static unsigned int
flags_from_code(enum rtx_code code)2967 flags_from_code (enum rtx_code code)
2968 {
2969   switch (code)
2970     {
2971     case LT:
2972     case GE:
2973       return CC_FLAG_S;
2974     case GT:
2975     case LE:
2976       return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2977     case GEU:
2978     case LTU:
2979       return CC_FLAG_C;
2980     case GTU:
2981     case LEU:
2982       return CC_FLAG_C | CC_FLAG_Z;
2983     case EQ:
2984     case NE:
2985       return CC_FLAG_Z;
2986     default:
2987       gcc_unreachable ();
2988     }
2989 }
2990 
2991 /* Return a CC_MODE of which both M1 and M2 are subsets.  */
2992 
2993 static enum machine_mode
rx_cc_modes_compatible(enum machine_mode m1,enum machine_mode m2)2994 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2995 {
2996   unsigned f;
2997 
2998   /* Early out for identical modes.  */
2999   if (m1 == m2)
3000     return m1;
3001 
3002   /* There's no valid combination for FP vs non-FP.  */
3003   f = flags_from_mode (m1) | flags_from_mode (m2);
3004   if (f & CC_FLAG_FP)
3005     return VOIDmode;
3006 
3007   /* Otherwise, see what mode can implement all the flags.  */
3008   return mode_from_flags (f);
3009 }
3010 
3011 /* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
3012 
3013 enum machine_mode
rx_select_cc_mode(enum rtx_code cmp_code,rtx x,rtx y)3014 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3015 {
3016   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3017     return CC_Fmode;
3018 
3019   if (y != const0_rtx)
3020     return CCmode;
3021 
3022   return mode_from_flags (flags_from_code (cmp_code));
3023 }
3024 
3025 /* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
3026    CC_MODE, and use that in branches based on that compare.  */
3027 
3028 void
rx_split_cbranch(enum machine_mode cc_mode,enum rtx_code cmp1,rtx c1,rtx c2,rtx label)3029 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
3030 		  rtx c1, rtx c2, rtx label)
3031 {
3032   rtx flags, x;
3033 
3034   flags = gen_rtx_REG (cc_mode, CC_REG);
3035   x = gen_rtx_COMPARE (cc_mode, c1, c2);
3036   x = gen_rtx_SET (VOIDmode, flags, x);
3037   emit_insn (x);
3038 
3039   x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3040   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3041   x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3042   emit_jump_insn (x);
3043 }
3044 
3045 /* A helper function for matching parallels that set the flags.  */
3046 
3047 bool
rx_match_ccmode(rtx insn,enum machine_mode cc_mode)3048 rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
3049 {
3050   rtx op1, flags;
3051   enum machine_mode flags_mode;
3052 
3053   gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3054 
3055   op1 = XVECEXP (PATTERN (insn), 0, 1);
3056   gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3057 
3058   flags = SET_DEST (op1);
3059   flags_mode = GET_MODE (flags);
3060 
3061   if (GET_MODE (SET_SRC (op1)) != flags_mode)
3062     return false;
3063   if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3064     return false;
3065 
3066   /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
3067   if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3068     return false;
3069 
3070   return true;
3071 }
3072 
3073 int
rx_align_for_label(rtx lab,int uses_threshold)3074 rx_align_for_label (rtx lab, int uses_threshold)
3075 {
3076   /* This is a simple heuristic to guess when an alignment would not be useful
3077      because the delay due to the inserted NOPs would be greater than the delay
3078      due to the misaligned branch.  If uses_threshold is zero then the alignment
3079      is always useful.  */
3080   if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3081     return 0;
3082 
3083   return optimize_size ? 1 : 3;
3084 }
3085 
3086 static int
rx_max_skip_for_label(rtx lab)3087 rx_max_skip_for_label (rtx lab)
3088 {
3089   int opsize;
3090   rtx op;
3091 
3092   if (lab == NULL_RTX)
3093     return 0;
3094 
3095   op = lab;
3096   do
3097     {
3098       op = next_nonnote_nondebug_insn (op);
3099     }
3100   while (op && (LABEL_P (op)
3101 		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3102   if (!op)
3103     return 0;
3104 
3105   opsize = get_attr_length (op);
3106   if (opsize >= 0 && opsize < 8)
3107     return opsize - 1;
3108   return 0;
3109 }
3110 
3111 /* Compute the real length of the extending load-and-op instructions.  */
3112 
3113 int
rx_adjust_insn_length(rtx insn,int current_length)3114 rx_adjust_insn_length (rtx insn, int current_length)
3115 {
3116   rtx extend, mem, offset;
3117   bool zero;
3118   int factor;
3119 
3120   switch (INSN_CODE (insn))
3121     {
3122     default:
3123       return current_length;
3124 
3125     case CODE_FOR_plussi3_zero_extendhi:
3126     case CODE_FOR_andsi3_zero_extendhi:
3127     case CODE_FOR_iorsi3_zero_extendhi:
3128     case CODE_FOR_xorsi3_zero_extendhi:
3129     case CODE_FOR_divsi3_zero_extendhi:
3130     case CODE_FOR_udivsi3_zero_extendhi:
3131     case CODE_FOR_minussi3_zero_extendhi:
3132     case CODE_FOR_smaxsi3_zero_extendhi:
3133     case CODE_FOR_sminsi3_zero_extendhi:
3134     case CODE_FOR_multsi3_zero_extendhi:
3135     case CODE_FOR_comparesi3_zero_extendhi:
3136       zero = true;
3137       factor = 2;
3138       break;
3139 
3140     case CODE_FOR_plussi3_sign_extendhi:
3141     case CODE_FOR_andsi3_sign_extendhi:
3142     case CODE_FOR_iorsi3_sign_extendhi:
3143     case CODE_FOR_xorsi3_sign_extendhi:
3144     case CODE_FOR_divsi3_sign_extendhi:
3145     case CODE_FOR_udivsi3_sign_extendhi:
3146     case CODE_FOR_minussi3_sign_extendhi:
3147     case CODE_FOR_smaxsi3_sign_extendhi:
3148     case CODE_FOR_sminsi3_sign_extendhi:
3149     case CODE_FOR_multsi3_sign_extendhi:
3150     case CODE_FOR_comparesi3_sign_extendhi:
3151       zero = false;
3152       factor = 2;
3153       break;
3154 
3155     case CODE_FOR_plussi3_zero_extendqi:
3156     case CODE_FOR_andsi3_zero_extendqi:
3157     case CODE_FOR_iorsi3_zero_extendqi:
3158     case CODE_FOR_xorsi3_zero_extendqi:
3159     case CODE_FOR_divsi3_zero_extendqi:
3160     case CODE_FOR_udivsi3_zero_extendqi:
3161     case CODE_FOR_minussi3_zero_extendqi:
3162     case CODE_FOR_smaxsi3_zero_extendqi:
3163     case CODE_FOR_sminsi3_zero_extendqi:
3164     case CODE_FOR_multsi3_zero_extendqi:
3165     case CODE_FOR_comparesi3_zero_extendqi:
3166       zero = true;
3167       factor = 1;
3168       break;
3169 
3170     case CODE_FOR_plussi3_sign_extendqi:
3171     case CODE_FOR_andsi3_sign_extendqi:
3172     case CODE_FOR_iorsi3_sign_extendqi:
3173     case CODE_FOR_xorsi3_sign_extendqi:
3174     case CODE_FOR_divsi3_sign_extendqi:
3175     case CODE_FOR_udivsi3_sign_extendqi:
3176     case CODE_FOR_minussi3_sign_extendqi:
3177     case CODE_FOR_smaxsi3_sign_extendqi:
3178     case CODE_FOR_sminsi3_sign_extendqi:
3179     case CODE_FOR_multsi3_sign_extendqi:
3180     case CODE_FOR_comparesi3_sign_extendqi:
3181       zero = false;
3182       factor = 1;
3183       break;
3184     }
3185 
3186   /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
3187   extend = single_set (insn);
3188   gcc_assert (extend != NULL_RTX);
3189 
3190   extend = SET_SRC (extend);
3191   if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3192       || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3193     extend = XEXP (extend, 0);
3194   else
3195     extend = XEXP (extend, 1);
3196 
3197   gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3198 	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3199 
3200   mem = XEXP (extend, 0);
3201   gcc_checking_assert (MEM_P (mem));
3202   if (REG_P (XEXP (mem, 0)))
3203     return (zero && factor == 1) ? 2 : 3;
3204 
3205   /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
3206   gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3207   gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3208 
3209   offset = XEXP (XEXP (mem, 0), 1);
3210   gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3211 
3212   if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3213     return (zero && factor == 1) ? 3 : 4;
3214 
3215   return (zero && factor == 1) ? 4 : 5;
3216 }
3217 
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  Always returns true,
   i.e. accesses to volatile bitfields use the narrowest containing
   mode rather than the full declared type width.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3223 
3224 static bool
rx_ok_to_inline(tree caller,tree callee)3225 rx_ok_to_inline (tree caller, tree callee)
3226 {
3227   /* Do not inline functions with local variables
3228      into a naked CALLER - naked function have no stack frame and
3229      locals need a frame in order to have somewhere to live.
3230 
3231      Unfortunately we have no way to determine the presence of
3232      local variables in CALLEE, so we have to be cautious and
3233      assume that there might be some there.
3234 
3235      We do allow inlining when CALLEE has the "inline" type
3236      modifier or the "always_inline" or "gnu_inline" attributes.  */
3237   return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3238     || DECL_DECLARED_INLINE_P (callee)
3239     || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3240     || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3241 }
3242 
3243 
/* Target hook overrides.  Each #undef/#define pair replaces the default
   implementation of a hook with the RX-specific version defined in this
   file, before TARGET_INITIALIZER collects them into `targetm'.  */

#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

/* All four alignment-skip hooks share one heuristic.  */
#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef	TARGET_ASM_SELECT_SECTION
#define	TARGET_ASM_SELECT_SECTION	rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE 		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P 	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG     		rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     	rx_function_arg_advance

#undef	TARGET_FUNCTION_ARG_BOUNDARY
#define	TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER			rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return

/* The target hook vector consumed by the rest of the compiler.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated by gengtype for this file.  */
#include "gt-rx.h"
3397