/* Subroutines for insn-output.c for Matsushita MN10300 series
   Copyright (C) 1996-2020 Free Software Foundation, Inc.
   Contributed by Jeff Law (law@cygnus.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "cfghooks.h"
#include "cfgloop.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "output.h"
#include "insn-attr.h"
#include "reload.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "cfgrtl.h"
#include "dumpfile.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

/* This is used in the am33_2.0-linux-gnu port, in which global symbol
   names are not prefixed by underscores, to tell whether to prefix a
   label with a plus sign or not, so that the assembler can tell
   symbol names from register names.  */
int mn10300_protect_label;

/* Selected processor type for tuning.  */
enum processor_type mn10300_tune_cpu = PROCESSOR_DEFAULT;

#define CC_FLAG_Z	1
#define CC_FLAG_N	2
#define CC_FLAG_C	4
#define CC_FLAG_V	8

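/* The CC_FLAG_* values name the individual Z (zero), N (negative/sign),
   C (carry) and V (overflow) condition-code bits.  The helpers declared
   below report which of these bits a given flags mode provides and which
   bits a given comparison code needs, so that the %b/%B operand codes can
   pick a branch condition that only relies on flags that are valid.  */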
static int cc_flags_for_mode(machine_mode);
static int cc_flags_for_code(enum rtx_code);

/* Implement TARGET_OPTION_OVERRIDE.  */
static void
mn10300_option_override (void)
{
  if (TARGET_AM33)
    target_flags &= ~MASK_MULT_BUG;
  else
    {
      /* Disable scheduling for the MN10300 as we do
	 not have timing information available for it.  */
      flag_schedule_insns = 0;
      flag_schedule_insns_after_reload = 0;

      /* Force enable splitting of wide types, as otherwise it is trivial
	 to run out of registers.  Indeed, this works so well that register
	 allocation problems are now more common *without* optimization,
	 when this flag is not enabled by default.  */
      flag_split_wide_types = 1;
    }

  if (mn10300_tune_string)
    {
      if (strcasecmp (mn10300_tune_string, "mn10300") == 0)
	mn10300_tune_cpu = PROCESSOR_MN10300;
      else if (strcasecmp (mn10300_tune_string, "am33") == 0)
	mn10300_tune_cpu = PROCESSOR_AM33;
      else if (strcasecmp (mn10300_tune_string, "am33-2") == 0)
	mn10300_tune_cpu = PROCESSOR_AM33_2;
      else if (strcasecmp (mn10300_tune_string, "am34") == 0)
	mn10300_tune_cpu = PROCESSOR_AM34;
      else
	error ("%<-mtune=%> expects mn10300, am33, am33-2, or am34");
    }
}

static void
mn10300_file_start (void)
{
  default_file_start ();

  if (TARGET_AM33_2)
    fprintf (asm_out_file, "\t.am33_2\n");
  else if (TARGET_AM33)
    fprintf (asm_out_file, "\t.am33\n");
}

/* Note: This list must match the liw_op attribute in mn10300.md.  */

static const char *liw_op_names[] =
{
  "add", "cmp", "sub", "mov",
  "and", "or", "xor",
  "asr", "lsr", "asl",
  "none", "max"
};

/* Print operand X using operand code CODE to assembly language output file
   FILE.  */

void
mn10300_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'W':
      {
	unsigned int liw_op = UINTVAL (x);

	gcc_assert (TARGET_ALLOW_LIW);
	gcc_assert (liw_op < LIW_OP_MAX);
	fputs (liw_op_names[liw_op], file);
	break;
      }

    case 'b':
    case 'B':
      {
	enum rtx_code cmp = GET_CODE (x);
	machine_mode mode = GET_MODE (XEXP (x, 0));
	const char *str;
	int have_flags;

	if (code == 'B')
	  cmp = reverse_condition (cmp);
	have_flags = cc_flags_for_mode (mode);

	switch (cmp)
	  {
	  case NE:
	    str = "ne";
	    break;
	  case EQ:
	    str = "eq";
	    break;
	  case GE:
	    /* bge is smaller than bnc.  */
	    str = (have_flags & CC_FLAG_V ? "ge" : "nc");
	    break;
	  case LT:
	    str = (have_flags & CC_FLAG_V ? "lt" : "ns");
	    break;
	  case GT:
	    str = "gt";
	    break;
	  case LE:
	    str = "le";
	    break;
	  case GEU:
	    str = "cc";
	    break;
	  case GTU:
	    str = "hi";
	    break;
	  case LEU:
	    str = "ls";
	    break;
	  case LTU:
	    str = "cs";
	    break;
	  case ORDERED:
	    str = "lge";
	    break;
	  case UNORDERED:
	    str = "uo";
	    break;
	  case LTGT:
	    str = "lg";
	    break;
	  case UNEQ:
	    str = "ue";
	    break;
	  case UNGE:
	    str = "uge";
	    break;
	  case UNGT:
	    str = "ug";
	    break;
	  case UNLE:
	    str = "ule";
	    break;
	  case UNLT:
	    str = "ul";
	    break;
	  default:
	    gcc_unreachable ();
	  }

	gcc_checking_assert ((cc_flags_for_code (cmp) & ~have_flags) == 0);
	fputs (str, file);
      }
      break;

    case 'C':
      /* This is used for the operand to a call instruction;
	 if it's a REG, enclose it in parens, else output
	 the operand normally.  */
      if (REG_P (x))
	{
	  fputc ('(', file);
	  mn10300_print_operand (file, x, 0);
	  fputc (')', file);
	}
      else
	mn10300_print_operand (file, x, 0);
      break;

    case 'D':
      switch (GET_CODE (x))
	{
	case MEM:
	  fputc ('(', file);
	  output_address (GET_MODE (x), XEXP (x, 0));
	  fputc (')', file);
	  break;

	case REG:
	  fprintf (file, "fd%d", REGNO (x) - 18);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;

      /* This is the least significant word of a 64-bit value.  */
    case 'L':
      switch (GET_CODE (x))
	{
	case MEM:
	  fputc ('(', file);
	  output_address (GET_MODE (x), XEXP (x, 0));
	  fputc (')', file);
	  break;

	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case SUBREG:
	  fprintf (file, "%s", reg_names[subreg_regno (x)]);
	  break;

	case CONST_DOUBLE:
	  {
	    long val[2];

	    switch (GET_MODE (x))
	      {
	      case E_DFmode:
		REAL_VALUE_TO_TARGET_DOUBLE
		  (*CONST_DOUBLE_REAL_VALUE (x), val);
		fprintf (file, "0x%lx", val[0]);
		break;
	      case E_SFmode:
		REAL_VALUE_TO_TARGET_SINGLE
		  (*CONST_DOUBLE_REAL_VALUE (x), val[0]);
		fprintf (file, "0x%lx", val[0]);
		break;
	      case E_VOIDmode:
	      case E_DImode:
		mn10300_print_operand_address (file,
					       GEN_INT (CONST_DOUBLE_LOW (x)));
		break;
	      default:
		break;
	      }
	    break;
	  }

	case CONST_INT:
	  {
	    rtx low, high;
	    split_double (x, &low, &high);
	    fprintf (file, "%ld", (long)INTVAL (low));
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
      break;

      /* Similarly, but for the most significant word.  */
    case 'H':
      switch (GET_CODE (x))
	{
	case MEM:
	  fputc ('(', file);
	  x = adjust_address (x, SImode, 4);
	  output_address (GET_MODE (x), XEXP (x, 0));
	  fputc (')', file);
	  break;

	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	  break;

	case SUBREG:
	  fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
	  break;

	case CONST_DOUBLE:
	  {
	    long val[2];

	    switch (GET_MODE (x))
	      {
	      case E_DFmode:
		REAL_VALUE_TO_TARGET_DOUBLE
		  (*CONST_DOUBLE_REAL_VALUE (x), val);
		fprintf (file, "0x%lx", val[1]);
		break;
	      case E_SFmode:
		gcc_unreachable ();
	      case E_VOIDmode:
	      case E_DImode:
		mn10300_print_operand_address (file,
					       GEN_INT (CONST_DOUBLE_HIGH (x)));
		break;
	      default:
		break;
	      }
	    break;
	  }

	case CONST_INT:
	  {
	    rtx low, high;
	    split_double (x, &low, &high);
	    fprintf (file, "%ld", (long)INTVAL (high));
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case 'A':
      fputc ('(', file);
      if (REG_P (XEXP (x, 0)))
	output_address (VOIDmode, gen_rtx_PLUS (SImode,
						XEXP (x, 0), const0_rtx));
      else
	output_address (VOIDmode, XEXP (x, 0));
      fputc (')', file);
      break;

    case 'N':
      gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
      fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
      break;

    case 'U':
      gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
      fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
      break;

      /* For shift counts.  The hardware ignores the upper bits of
	 any immediate, but the assembler will flag an out of range
	 shift count as an error.  So we mask off the high bits
	 of the immediate here.  */
    case 'S':
      if (CONST_INT_P (x))
	{
	  fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
	  break;
	}
      /* FALL THROUGH */

    default:
      switch (GET_CODE (x))
	{
	case MEM:
	  fputc ('(', file);
	  output_address (GET_MODE (x), XEXP (x, 0));
	  fputc (')', file);
	  break;

	case PLUS:
	  output_address (VOIDmode, x);
	  break;

	case REG:
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case SUBREG:
	  fprintf (file, "%s", reg_names[subreg_regno (x)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), val);
	    fprintf (file, "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	case SYMBOL_REF:
	case CONST:
	case LABEL_REF:
	case CODE_LABEL:
	case UNSPEC:
	  mn10300_print_operand_address (file, x);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Output assembly language output for the address ADDR to FILE.  */

void
mn10300_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case POST_INC:
      mn10300_print_operand (file, XEXP (addr, 0), 0);
      fputc ('+', file);
      break;

    case POST_MODIFY:
      mn10300_print_operand (file, XEXP (addr, 0), 0);
      fputc ('+', file);
      fputc (',', file);
      mn10300_print_operand (file, XEXP (addr, 1), 0);
      break;

    case REG:
      mn10300_print_operand (file, addr, 0);
      break;
    case PLUS:
      {
	rtx base = XEXP (addr, 0);
	rtx index = XEXP (addr, 1);

	if (REG_P (index) && !REG_OK_FOR_INDEX_P (index))
	  {
	    rtx x = base;
	    base = index;
	    index = x;

	    gcc_assert (REG_P (index) && REG_OK_FOR_INDEX_P (index));
	  }
	gcc_assert (REG_OK_FOR_BASE_P (base));

	mn10300_print_operand (file, index, 0);
	fputc (',', file);
	mn10300_print_operand (file, base, 0);
	break;
      }
    case SYMBOL_REF:
      output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}

/* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.

   Used for PIC-specific UNSPECs.  */

static bool
mn10300_asm_output_addr_const_extra (FILE *file, rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    {
      switch (XINT (x, 1))
	{
	case UNSPEC_PIC:
	  /* GLOBAL_OFFSET_TABLE or local symbols, no suffix.  */
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  break;
	case UNSPEC_GOT:
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  fputs ("@GOT", file);
	  break;
	case UNSPEC_GOTOFF:
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  fputs ("@GOTOFF", file);
	  break;
	case UNSPEC_PLT:
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  fputs ("@PLT", file);
	  break;
	case UNSPEC_GOTSYM_OFF:
	  assemble_name (file, GOT_SYMBOL_NAME);
	  fputs ("-(", file);
	  output_addr_const (file, XVECEXP (x, 0, 0));
	  fputs ("-.)", file);
	  break;
	default:
	  return false;
	}
      return true;
    }
  else
    return false;
}

/* Count the number of FP registers that have to be saved.  */
static int
fp_regs_to_save (void)
{
  int i, n = 0;

  if (! TARGET_AM33_2)
    return 0;

  for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
    if (df_regs_ever_live_p (i) && ! call_used_regs[i])
      ++n;

  return n;
}

/* Print a set of registers in the format required by "movm" and "ret".
   Register K is saved if bit K of MASK is set.  The data and address
   registers can be stored individually, but the extended registers cannot.
   We assume that the mask already takes that into account.  For instance,
   bits 14 to 17 must have the same value.  */
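/* For example, a MASK of 0x3c000 (just the callee-saved extended register
   group) prints as "[exreg1]"; a mask that also has bits set below
   FIRST_EXTENDED_REGNUM prints those registers individually, comma
   separated, before the "exreg1" group.  */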

void
mn10300_print_reg_list (FILE *file, int mask)
{
  int need_comma;
  int i;

  need_comma = 0;
  fputc ('[', file);

  for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
    if ((mask & (1 << i)) != 0)
      {
	if (need_comma)
	  fputc (',', file);
	fputs (reg_names [i], file);
	need_comma = 1;
      }

  if ((mask & 0x3c000) != 0)
    {
      gcc_assert ((mask & 0x3c000) == 0x3c000);
      if (need_comma)
	fputc (',', file);
      fputs ("exreg1", file);
      need_comma = 1;
    }

  fputc (']', file);
}

/* If the MDR register is never clobbered, we can use the RETF instruction
   which takes the address from the MDR register.  This is 3 cycles faster
   than having to load the address from the stack.  */

bool
mn10300_can_use_retf_insn (void)
{
  /* Don't bother if we're not optimizing.  In this case we won't
     have proper access to df_regs_ever_live_p.  */
  if (!optimize)
    return false;

  /* EH returns alter the saved return address; MDR is not current.  */
  if (crtl->calls_eh_return)
    return false;

  /* Obviously not if MDR is ever clobbered.  */
  if (df_regs_ever_live_p (MDR_REG))
    return false;

  /* ??? Careful not to use this during expand_epilogue etc.  */
  gcc_assert (!in_sequence_p ());
  return leaf_function_p ();
}

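/* Returns true if the simple "rets" instruction can be used for the return,
   i.e. when mn10300_initial_offset reports that there is no stack (frame or
   register save area) left for the return instruction to release.  */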
bool
mn10300_can_use_rets_insn (void)
{
  return !mn10300_initial_offset (ARG_POINTER_REGNUM, STACK_POINTER_REGNUM);
}

/* Returns the set of live, callee-saved registers as a bitmask.  The
   callee-saved extended registers cannot be stored individually, so
   all of them will be included in the mask if any one of them is used.
   Also returns the number of bytes in the registers in the mask if
   BYTES_SAVED is not NULL.  */
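/* For instance, if only one of the four callee-saved extended registers is
   live, all four bits (0x3c000) are still set in the returned mask and
   *BYTES_SAVED accounts for all four extra words.  */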

unsigned int
mn10300_get_live_callee_saved_regs (unsigned int * bytes_saved)
{
  int mask;
  int i;
  unsigned int count;

  count = mask = 0;
  for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
    if (df_regs_ever_live_p (i) && ! call_used_regs[i])
      {
	mask |= (1 << i);
	++ count;
      }

  if ((mask & 0x3c000) != 0)
    {
      for (i = 0x04000; i < 0x40000; i <<= 1)
	if ((mask & i) == 0)
	  ++ count;

      mask |= 0x3c000;
    }

  if (bytes_saved)
    * bytes_saved = count * UNITS_PER_WORD;

  return mask;
}

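/* Mark the rtx R (an insn, or one of the expressions making up an insn) as
   frame related, so that the prologue code below records its effect in the
   DWARF call-frame information, and return it.  */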
static rtx
F (rtx r)
{
  RTX_FRAME_RELATED_P (r) = 1;
  return r;
}

/* Generate an instruction that pushes several registers onto the stack.
   Register K will be saved if bit K in MASK is set.  The function does
   nothing if MASK is zero.

   To be compatible with the "movm" instruction, the lowest-numbered
   register must be stored in the lowest slot.  If MASK is the set
   { R1,...,RN }, where R1...RN are ordered least first, the generated
   instruction will have the form:

       (parallel
         (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
	 (set (mem:SI (plus:SI (reg:SI 9)
	                       (const_int -1*4)))
	      (reg:SI RN))
	 ...
	 (set (mem:SI (plus:SI (reg:SI 9)
	                       (const_int -N*4)))
	      (reg:SI R1))) */

static void
mn10300_gen_multiple_store (unsigned int mask)
{
  /* The order in which registers are stored, from SP-4 through SP-N*4.  */
  static const unsigned int store_order[8] = {
    /* e2, e3: never saved */
    FIRST_EXTENDED_REGNUM + 4,
    FIRST_EXTENDED_REGNUM + 5,
    FIRST_EXTENDED_REGNUM + 6,
    FIRST_EXTENDED_REGNUM + 7,
    /* e0, e1, mdrq, mcrh, mcrl, mcvf: never saved.  */
    FIRST_DATA_REGNUM + 2,
    FIRST_DATA_REGNUM + 3,
    FIRST_ADDRESS_REGNUM + 2,
    FIRST_ADDRESS_REGNUM + 3,
    /* d0, d1, a0, a1, mdr, lir, lar: never saved.  */
  };

  rtx x, elts[9];
  unsigned int i;
  int count;

  if (mask == 0)
    return;

  for (i = count = 0; i < ARRAY_SIZE(store_order); ++i)
    {
      unsigned regno = store_order[i];

      if (((mask >> regno) & 1) == 0)
	continue;

      ++count;
      x = plus_constant (Pmode, stack_pointer_rtx, count * -4);
      x = gen_frame_mem (SImode, x);
      x = gen_rtx_SET (x, gen_rtx_REG (SImode, regno));
      elts[count] = F(x);

      /* Remove the register from the mask so that... */
      mask &= ~(1u << regno);
    }

  /* ... we can make sure that we didn't try to use a register
     not listed in the store order.  */
  gcc_assert (mask == 0);

  /* Create the instruction that updates the stack pointer.  */
  x = plus_constant (Pmode, stack_pointer_rtx, count * -4);
  x = gen_rtx_SET (stack_pointer_rtx, x);
  elts[0] = F(x);

  /* We need one PARALLEL element to update the stack pointer and
     an additional element for each register that is stored.  */
  x = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count + 1, elts));
  F (emit_insn (x));
}

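/* Return the number of bits set in MASK; this is the number of registers
   that mn10300_gen_multiple_store pushes for that mask, and is used below
   to account for their contribution to the static stack size.  */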
static inline unsigned int
popcount (unsigned int mask)
{
  unsigned int count = 0;

  while (mask)
    {
      ++ count;
      mask &= ~ (mask & - mask);
    }
  return count;
}

void
mn10300_expand_prologue (void)
{
  HOST_WIDE_INT size = mn10300_frame_size ();
  unsigned int mask;

  mask = mn10300_get_live_callee_saved_regs (NULL);
  /* If we use any of the callee-saved registers, save them now.  */
  mn10300_gen_multiple_store (mask);

  if (flag_stack_usage_info)
    current_function_static_stack_size = size + popcount (mask) * 4;

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      HOST_WIDE_INT xsize;
      enum
      {
	save_sp_merge,
	save_sp_no_merge,
	save_sp_partial_merge,
	save_a0_merge,
	save_a0_no_merge
      } strategy;
      unsigned int strategy_size = (unsigned)-1, this_strategy_size;
      rtx reg;

      if (flag_stack_usage_info)
	current_function_static_stack_size += num_regs_to_save * 4;

      /* We have several different strategies to save FP registers.
	 We can store them using SP offsets, which is beneficial if
	 there are just a few registers to save, or we can use `a0' in
	 post-increment mode (`a0' is the only call-clobbered address
	 register that is never used to pass information to a
	 function).  Furthermore, if we don't need a frame pointer, we
	 can merge the two SP adds into a single one, but this isn't
	 always beneficial; sometimes we can just split the two adds
	 so that we don't exceed a 16-bit constant size.  The code
	 below will select which strategy to use, so as to generate the
	 smallest code.  Ties are broken in favor of shorter sequences
	 (in terms of number of instructions).  */

#define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
#define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)

/* We add 0 * (S) in two places to promote to the type of S,
   so that all arms of the conditional have the same type.  */
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
   : 0 * (S) + (ELSE))
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
                   SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
				    (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
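
/* A rough illustration of the size estimates above (a sketch only; the
   byte counts below simply follow from the macros, they are not a recount
   of the actual MN10300 encodings):

     SIZE_ADD_SP (-4)     == 3   (offset fits in 8 bits)
     SIZE_ADD_SP (-200)   == 4   (needs a 16-bit offset)
     SIZE_ADD_SP (-40000) == 6   (needs a 32-bit offset)
     SIZE_ADD_AX (-4)     == 2   (the same add through an address register
				  is assumed to be one byte shorter)

   These are only heuristics used to pick the cheapest save strategy; they
   do not affect correctness.  */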

      /* Consider alternative save_sp_merge only if we don't need the
	 frame pointer and size is nonzero.  */
      if (! frame_pointer_needed && size)
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_sp_no_merge unconditionally.  */
      /* Insn: add -4 * num_regs_to_save, sp.  */
      this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
      /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
      this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
      if (size)
	{
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
	}

      if (this_strategy_size < strategy_size)
	{
	  strategy = save_sp_no_merge;
	  strategy_size = this_strategy_size;
	}

      /* Consider alternative save_sp_partial_merge only if we don't
	 need a frame pointer and size is reasonably large.  */
      if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
	{
	  /* Insn: add -128, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-128);
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
					      num_regs_to_save);
	  if (size)
	    {
	      /* Insn: add 128-size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (128 - size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_partial_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_merge only if we don't need a
	 frame pointer, size is nonzero and the user hasn't
	 changed the calling conventions of a0.  */
      if (! frame_pointer_needed && size
	  && call_used_regs[FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  if (size)
	    {
	      /* Insn: add size, a0.  */
	      this_strategy_size += SIZE_ADD_AX (size);
	    }
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_no_merge if the user hasn't
	 changed the calling conventions of a0.  */
      if (call_used_regs[FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -4 * num_regs_to_save, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  if (size)
	    {
	      /* Insn: add -size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (-size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_no_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Emit the initial SP add, common to all strategies.  */
      switch (strategy)
	{
	case save_sp_no_merge:
	case save_a0_no_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-4 * num_regs_to_save))));
	  xsize = 0;
	  break;

	case save_sp_partial_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-128))));
	  xsize = 128 - 4 * num_regs_to_save;
	  size -= xsize;
	  break;

	case save_sp_merge:
	case save_a0_merge:
	  F (emit_insn (gen_addsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    GEN_INT (-(size + 4 * num_regs_to_save)))));
	  /* We'll have to adjust FP register saves according to the
	     frame size.  */
	  xsize = size;
	  /* Since we've already created the stack frame, don't do it
	     again at the end of the function.  */
	  size = 0;
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now prepare register a0, if we have decided to use it.  */
      switch (strategy)
	{
	case save_sp_merge:
	case save_sp_no_merge:
	case save_sp_partial_merge:
	  reg = 0;
	  break;

	case save_a0_merge:
	case save_a0_no_merge:
	  reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
	  F (emit_insn (gen_movsi (reg, stack_pointer_rtx)));
	  if (xsize)
	    F (emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize))));
	  reg = gen_rtx_POST_INC (SImode, reg);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now actually save the FP registers.  */
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_used_regs[i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else
	      {
		/* If we aren't using `a0', use an SP offset.  */
		if (xsize)
		  {
		    addr = gen_rtx_PLUS (SImode,
					 stack_pointer_rtx,
					 GEN_INT (xsize));
		  }
		else
		  addr = stack_pointer_rtx;

		xsize += 4;
	      }

	    F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode, addr),
				     gen_rtx_REG (SFmode, i))));
	  }
    }

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    F (emit_move_insn (frame_pointer_rtx, stack_pointer_rtx));

  /* Allocate stack for this frame.  */
  if (size)
    F (emit_insn (gen_addsi3 (stack_pointer_rtx,
			      stack_pointer_rtx,
			      GEN_INT (-size))));

  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    emit_insn (gen_load_pic ());
}

void
mn10300_expand_epilogue (void)
{
  HOST_WIDE_INT size = mn10300_frame_size ();
  unsigned int reg_save_bytes;

  mn10300_get_live_callee_saved_regs (& reg_save_bytes);

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      rtx reg = 0;

      /* We have several options to restore FP registers.  We could
	 load them from SP offsets, but, if there are enough FP
	 registers to restore, we win if we use a post-increment
	 addressing mode.  */

      /* If we have a frame pointer, it's the best option, because we
	 already know it has the value we want.  */
      if (frame_pointer_needed)
	reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
      /* Otherwise, we may use `a1', since it's call-clobbered and
	 it's never used for return values.  But only do so if it's
	 smaller than using SP offsets.  */
      else
	{
	  enum { restore_sp_post_adjust,
		 restore_sp_pre_adjust,
		 restore_sp_partial_adjust,
		 restore_a1 } strategy;
	  unsigned int this_strategy_size, strategy_size = (unsigned)-1;

	  /* Consider using sp offsets before adjusting sp.  */
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
	  /* If size is too large, we'll have to adjust SP with an
	     add.  */
	  if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
	    {
	      /* Insn: add size + 4 * num_regs_to_save, sp.  */
	      this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
	    }
	  /* If we don't have to restore any non-FP registers,
	     we'll be able to save one byte by using rets.  */
	  if (! reg_save_bytes)
	    this_strategy_size--;

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = restore_sp_post_adjust;
	      strategy_size = this_strategy_size;
	    }

	  /* Consider using sp offsets after adjusting sp.  */
	  /* Insn: add size, sp.  */
	  this_strategy_size = SIZE_ADD_SP (size);
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
	  /* We're going to use ret to release the FP registers
	     save area, so, no savings.  */

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = restore_sp_pre_adjust;
	      strategy_size = this_strategy_size;
	    }

	  /* Consider using sp offsets after partially adjusting sp.
	     When size is close to 32Kb, we may be able to adjust SP
	     with an imm16 add instruction while still using fmov
	     (d8,sp).  */
	  if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
	    {
	      /* Insn: add size + 4 * num_regs_to_save
			+ reg_save_bytes - 252,sp.  */
	      this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
						+ (int) reg_save_bytes - 252);
	      /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	      this_strategy_size += SIZE_FMOV_SP (252 - reg_save_bytes
						  - 4 * num_regs_to_save,
						  num_regs_to_save);
	      /* We're going to use ret to release the FP registers
		 save area, so, no savings.  */

	      if (this_strategy_size < strategy_size)
		{
		  strategy = restore_sp_partial_adjust;
		  strategy_size = this_strategy_size;
		}
	    }

	  /* Consider using a1 in post-increment mode, as long as the
	     user hasn't changed the calling conventions of a1.  */
	  if (call_used_regs[FIRST_ADDRESS_REGNUM + 1]
	      && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
	    {
	      /* Insn: mov sp,a1.  */
	      this_strategy_size = 1;
	      if (size)
		{
		  /* Insn: add size,a1.  */
		  this_strategy_size += SIZE_ADD_AX (size);
		}
	      /* Insn: fmov (a1+),fs#, for each fs# to be restored.  */
	      this_strategy_size += 3 * num_regs_to_save;
	      /* If size is large enough, we may be able to save a
		 couple of bytes.  */
	      if (size + 4 * num_regs_to_save + reg_save_bytes > 255)
		{
		  /* Insn: mov a1,sp.  */
		  this_strategy_size += 2;
		}
	      /* If we don't have to restore any non-FP registers,
		 we'll be able to save one byte by using rets.  */
	      if (! reg_save_bytes)
		this_strategy_size--;

	      if (this_strategy_size < strategy_size)
		{
		  strategy = restore_a1;
		  strategy_size = this_strategy_size;
		}
	    }

	  switch (strategy)
	    {
	    case restore_sp_post_adjust:
	      break;

	    case restore_sp_pre_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     GEN_INT (size)));
	      size = 0;
	      break;

	    case restore_sp_partial_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     GEN_INT (size + 4 * num_regs_to_save
					      + reg_save_bytes - 252)));
	      size = 252 - reg_save_bytes - 4 * num_regs_to_save;
	      break;

	    case restore_a1:
	      reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
	      emit_insn (gen_movsi (reg, stack_pointer_rtx));
	      if (size)
		emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}

      /* Adjust the selected register, if any, for post-increment.  */
      if (reg)
	reg = gen_rtx_POST_INC (SImode, reg);

      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_used_regs[i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else if (size)
	      {
		/* If we aren't using a post-increment register, use an
		   SP offset.  */
		addr = gen_rtx_PLUS (SImode,
				     stack_pointer_rtx,
				     GEN_INT (size));
	      }
	    else
	      addr = stack_pointer_rtx;

	    size += 4;

	    emit_insn (gen_movsf (gen_rtx_REG (SFmode, i),
				  gen_rtx_MEM (SFmode, addr)));
	  }

      /* If we were using the restore_a1 strategy and the number of
	 bytes to be released won't fit in the `ret' byte, copy `a1'
	 to `sp', to avoid having to use `add' to adjust it.  */
      if (! frame_pointer_needed && reg && size + reg_save_bytes > 255)
	{
	  emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
	  size = 0;
	}
    }

  /* Maybe cut back the stack, except for the register save area.

     If the frame pointer exists, then use the frame pointer to
     cut back the stack.

     If the stack size + register save area is more than 255 bytes,
     then the stack must be cut back here since the size + register
     save size is too big for a ret/retf instruction.

     Else leave it alone, it will be cut back as part of the
     ret/retf instruction, or there wasn't any stack to begin with.

     Under no circumstances should the register save area be
     deallocated here, that would leave a window where an interrupt
     could occur and trash the register save area.  */
  if (frame_pointer_needed)
    {
      emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
      size = 0;
    }
  else if (size + reg_save_bytes > 255)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx,
			     stack_pointer_rtx,
			     GEN_INT (size)));
      size = 0;
    }

  /* Adjust the stack and restore callee-saved registers, if any.  */
  if (mn10300_can_use_rets_insn ())
    emit_jump_insn (ret_rtx);
  else
    emit_jump_insn (gen_return_ret (GEN_INT (size + reg_save_bytes)));
}

/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.  */

unsigned int
mn10300_store_multiple_regs (rtx op)
{
  int count;
  int mask;
  int i;
  unsigned int last;
  rtx elt;

  count = XVECLEN (op, 0);
  if (count < 2)
    return 0;

  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || (! REG_P (SET_DEST (elt)))
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
    return 0;

  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if ((! REG_P (XEXP (elt, 0)))
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || (! CONST_INT_P (XEXP (elt, 1)))
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
    return 0;

  mask = 0;
  for (i = 1; i < count; i++)
    {
      /* Check that element i is a (set (mem M) R).  */
      /* ??? Validate the register order a-la mn10300_gen_multiple_store.
	 Remember: the ordering is *not* monotonic.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || (! MEM_P (SET_DEST (elt)))
	  || (! REG_P (SET_SRC (elt))))
	return 0;

      /* Remember which registers are to be saved.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);

      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
	  || (! REG_P (XEXP (elt, 0)))
	  || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
	  || (! CONST_INT_P (XEXP (elt, 1)))
	  || INTVAL (XEXP (elt, 1)) != -i * 4)
	return 0;
    }

  /* All or none of the callee-saved extended registers must be in the set.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
    return 0;

  return mask;
}

/* Implement TARGET_PREFERRED_RELOAD_CLASS.  */

static reg_class_t
mn10300_preferred_reload_class (rtx x, reg_class_t rclass)
{
  if (x == stack_pointer_rtx && rclass != SP_REGS)
    return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
  else if (MEM_P (x)
	   || (REG_P (x)
	       && !HARD_REGISTER_P (x))
	   || (GET_CODE (x) == SUBREG
	       && REG_P (SUBREG_REG (x))
	       && !HARD_REGISTER_P (SUBREG_REG (x))))
    return LIMIT_RELOAD_CLASS (GET_MODE (x), rclass);
  else
    return rclass;
}

/* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

static reg_class_t
mn10300_preferred_output_reload_class (rtx x, reg_class_t rclass)
{
  if (x == stack_pointer_rtx && rclass != SP_REGS)
    return (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
  return rclass;
}

/* Implement TARGET_SECONDARY_RELOAD.  */

static reg_class_t
mn10300_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
			  machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;
  enum reg_class xclass = NO_REGS;
  unsigned int xregno = INVALID_REGNUM;

  if (REG_P (x))
    {
      xregno = REGNO (x);
      if (xregno >= FIRST_PSEUDO_REGISTER)
	xregno = true_regnum (x);
      if (xregno != INVALID_REGNUM)
	xclass = REGNO_REG_CLASS (xregno);
    }

  if (!TARGET_AM33)
    {
      /* Memory load/stores less than a full word wide can't have an
         address or stack pointer destination.  They must use a data
         register as an intermediate register.  */
      if (rclass != DATA_REGS
	  && (mode == QImode || mode == HImode)
	  && xclass == NO_REGS)
	return DATA_REGS;

      /* We can only move SP to/from an address register.  */
      if (in_p
	  && rclass == SP_REGS
	  && xclass != ADDRESS_REGS)
	return ADDRESS_REGS;
      if (!in_p
	  && xclass == SP_REGS
	  && rclass != ADDRESS_REGS
	  && rclass != SP_OR_ADDRESS_REGS)
	return ADDRESS_REGS;
    }

  /* We can't directly load sp + const_int into a register;
139863d1a8abSmrg      we must use an address register as a scratch.  */
139963d1a8abSmrg   if (in_p
140063d1a8abSmrg       && rclass != SP_REGS
140163d1a8abSmrg       && rclass != SP_OR_ADDRESS_REGS
140263d1a8abSmrg       && rclass != SP_OR_GENERAL_REGS
140363d1a8abSmrg       && GET_CODE (x) == PLUS
140463d1a8abSmrg       && (XEXP (x, 0) == stack_pointer_rtx
140563d1a8abSmrg 	  || XEXP (x, 1) == stack_pointer_rtx))
140663d1a8abSmrg     {
140763d1a8abSmrg       sri->icode = CODE_FOR_reload_plus_sp_const;
140863d1a8abSmrg       return NO_REGS;
140963d1a8abSmrg     }
141063d1a8abSmrg 
141163d1a8abSmrg   /* We can only move MDR to/from a data register.  */
141263d1a8abSmrg   if (rclass == MDR_REGS && xclass != DATA_REGS)
141363d1a8abSmrg     return DATA_REGS;
141463d1a8abSmrg   if (xclass == MDR_REGS && rclass != DATA_REGS)
141563d1a8abSmrg     return DATA_REGS;
141663d1a8abSmrg 
141763d1a8abSmrg   /* We can't load/store an FP register from a constant address.  */
141863d1a8abSmrg   if (TARGET_AM33_2
141963d1a8abSmrg       && (rclass == FP_REGS || xclass == FP_REGS)
142063d1a8abSmrg       && (xclass == NO_REGS || rclass == NO_REGS))
142163d1a8abSmrg     {
142263d1a8abSmrg       rtx addr = NULL;
142363d1a8abSmrg 
142463d1a8abSmrg       if (xregno >= FIRST_PSEUDO_REGISTER && xregno != INVALID_REGNUM)
142563d1a8abSmrg 	{
142663d1a8abSmrg 	  addr = reg_equiv_mem (xregno);
142763d1a8abSmrg 	  if (addr)
142863d1a8abSmrg 	    addr = XEXP (addr, 0);
142963d1a8abSmrg 	}
143063d1a8abSmrg       else if (MEM_P (x))
143163d1a8abSmrg 	addr = XEXP (x, 0);
143263d1a8abSmrg 
143363d1a8abSmrg       if (addr && CONSTANT_ADDRESS_P (addr))
143463d1a8abSmrg 	return GENERAL_REGS;
143563d1a8abSmrg     }
143663d1a8abSmrg   /* Otherwise assume no secondary reloads are needed.  */
143763d1a8abSmrg   return NO_REGS;
143863d1a8abSmrg }
143963d1a8abSmrg 
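/* Return the size of the current function's frame: local variables plus
   the outgoing argument area, plus one word for the return pointer
   whenever there is an outgoing argument area.  For example (purely
   illustrative numbers), 16 bytes of locals and 8 bytes of outgoing
   arguments give 16 + 8 + 4 = 28 bytes.  */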
144063d1a8abSmrg int
mn10300_frame_size(void)144163d1a8abSmrg mn10300_frame_size (void)
144263d1a8abSmrg {
144363d1a8abSmrg   /* size includes the fixed stack space needed for function calls.  */
144463d1a8abSmrg   int size = get_frame_size () + crtl->outgoing_args_size;
144563d1a8abSmrg 
144663d1a8abSmrg   /* And space for the return pointer.  */
144763d1a8abSmrg   size += crtl->outgoing_args_size ? 4 : 0;
144863d1a8abSmrg 
144963d1a8abSmrg   return size;
145063d1a8abSmrg }
145163d1a8abSmrg 
145263d1a8abSmrg int
mn10300_initial_offset(int from,int to)145363d1a8abSmrg mn10300_initial_offset (int from, int to)
145463d1a8abSmrg {
145563d1a8abSmrg   int diff = 0;
145663d1a8abSmrg 
145763d1a8abSmrg   gcc_assert (from == ARG_POINTER_REGNUM || from == FRAME_POINTER_REGNUM);
145863d1a8abSmrg   gcc_assert (to == FRAME_POINTER_REGNUM || to == STACK_POINTER_REGNUM);
145963d1a8abSmrg 
146063d1a8abSmrg   if (to == STACK_POINTER_REGNUM)
146163d1a8abSmrg     diff = mn10300_frame_size ();
146263d1a8abSmrg 
146363d1a8abSmrg   /* The difference between the argument pointer and the frame pointer
146463d1a8abSmrg      is the size of the callee register save area.  */
146563d1a8abSmrg   if (from == ARG_POINTER_REGNUM)
146663d1a8abSmrg     {
146763d1a8abSmrg       unsigned int reg_save_bytes;
146863d1a8abSmrg 
146963d1a8abSmrg       mn10300_get_live_callee_saved_regs (& reg_save_bytes);
147063d1a8abSmrg       diff += reg_save_bytes;
147163d1a8abSmrg       diff += 4 * fp_regs_to_save ();
147263d1a8abSmrg     }
147363d1a8abSmrg 
147463d1a8abSmrg   return diff;
147563d1a8abSmrg }
147663d1a8abSmrg 
147763d1a8abSmrg /* Worker function for TARGET_RETURN_IN_MEMORY.  */
147863d1a8abSmrg 
147963d1a8abSmrg static bool
mn10300_return_in_memory(const_tree type,const_tree fntype ATTRIBUTE_UNUSED)148063d1a8abSmrg mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
148163d1a8abSmrg {
148263d1a8abSmrg   /* Return in memory values more than 8 bytes long, zero-sized, or in BLKmode.  */
148363d1a8abSmrg   return (int_size_in_bytes (type) > 8
148463d1a8abSmrg 	  || int_size_in_bytes (type) == 0
148563d1a8abSmrg 	  || TYPE_MODE (type) == BLKmode);
148663d1a8abSmrg }
148763d1a8abSmrg 
148863d1a8abSmrg /* Flush the argument registers to the stack for a stdarg function;
148963d1a8abSmrg    return the new argument pointer.  */
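
/* A sketch of the resulting layout, assuming the usual assignment of
   d0 and d1 as the two argument registers: d0 is stored at the internal
   argument pointer and d1 in the following word, so the anonymous
   arguments of a varargs function form one contiguous block starting at
   the address returned below.  */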
149063d1a8abSmrg static rtx
mn10300_builtin_saveregs(void)149163d1a8abSmrg mn10300_builtin_saveregs (void)
149263d1a8abSmrg {
149363d1a8abSmrg   rtx offset, mem;
149463d1a8abSmrg   tree fntype = TREE_TYPE (current_function_decl);
149563d1a8abSmrg   int argadj = ((!stdarg_p (fntype))
149663d1a8abSmrg                 ? UNITS_PER_WORD : 0);
149763d1a8abSmrg   alias_set_type set = get_varargs_alias_set ();
149863d1a8abSmrg 
149963d1a8abSmrg   if (argadj)
150063d1a8abSmrg     offset = plus_constant (Pmode, crtl->args.arg_offset_rtx, argadj);
150163d1a8abSmrg   else
150263d1a8abSmrg     offset = crtl->args.arg_offset_rtx;
150363d1a8abSmrg 
150463d1a8abSmrg   mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
150563d1a8abSmrg   set_mem_alias_set (mem, set);
150663d1a8abSmrg   emit_move_insn (mem, gen_rtx_REG (SImode, 0));
150763d1a8abSmrg 
150863d1a8abSmrg   mem = gen_rtx_MEM (SImode,
150963d1a8abSmrg 		     plus_constant (Pmode,
151063d1a8abSmrg 				    crtl->args.internal_arg_pointer, 4));
151163d1a8abSmrg   set_mem_alias_set (mem, set);
151263d1a8abSmrg   emit_move_insn (mem, gen_rtx_REG (SImode, 1));
151363d1a8abSmrg 
151463d1a8abSmrg   return copy_to_reg (expand_binop (Pmode, add_optab,
151563d1a8abSmrg 				    crtl->args.internal_arg_pointer,
151663d1a8abSmrg 				    offset, 0, 0, OPTAB_LIB_WIDEN));
151763d1a8abSmrg }
151863d1a8abSmrg 
151963d1a8abSmrg static void
mn10300_va_start(tree valist,rtx nextarg)152063d1a8abSmrg mn10300_va_start (tree valist, rtx nextarg)
152163d1a8abSmrg {
152263d1a8abSmrg   nextarg = expand_builtin_saveregs ();
152363d1a8abSmrg   std_expand_builtin_va_start (valist, nextarg);
152463d1a8abSmrg }
152563d1a8abSmrg 
152663d1a8abSmrg /* Return true when a parameter should be passed by reference.  */
152763d1a8abSmrg 
152863d1a8abSmrg static bool
mn10300_pass_by_reference(cumulative_args_t,const function_arg_info & arg)1529*ec02198aSmrg mn10300_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
153063d1a8abSmrg {
1531*ec02198aSmrg   unsigned HOST_WIDE_INT size = arg.type_size_in_bytes ();
153263d1a8abSmrg   return (size > 8 || size == 0);
153363d1a8abSmrg }
153463d1a8abSmrg 
1535*ec02198aSmrg /* Return an RTX to represent where argument ARG will be passed to a function.
1536*ec02198aSmrg    If the result is NULL_RTX, the argument is pushed.  */
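
/* Illustrative sketch of the convention implemented below: the first
   eight bytes of arguments travel in the two argument registers, one
   word each, and everything beyond that is pushed.  So for a call such
   as f (int a, int b, int c), a goes in the first argument register,
   b in the second and c on the stack (hypothetical example; the actual
   registers depend on FIRST_ARGUMENT_REGNUM).  */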
153763d1a8abSmrg 
153863d1a8abSmrg static rtx
mn10300_function_arg(cumulative_args_t cum_v,const function_arg_info & arg)1539*ec02198aSmrg mn10300_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
154063d1a8abSmrg {
154163d1a8abSmrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
154263d1a8abSmrg   rtx result = NULL_RTX;
154363d1a8abSmrg   int size;
154463d1a8abSmrg 
154563d1a8abSmrg   /* We only support using 2 data registers as argument registers.  */
154663d1a8abSmrg   int nregs = 2;
154763d1a8abSmrg 
154863d1a8abSmrg   /* Figure out the size of the object to be passed.  */
1549*ec02198aSmrg   size = arg.promoted_size_in_bytes ();
155063d1a8abSmrg   cum->nbytes = (cum->nbytes + 3) & ~3;
155163d1a8abSmrg 
155263d1a8abSmrg   /* Don't pass this arg via a register if all the argument registers
155363d1a8abSmrg      are used up.  */
155463d1a8abSmrg   if (cum->nbytes > nregs * UNITS_PER_WORD)
155563d1a8abSmrg     return result;
155663d1a8abSmrg 
155763d1a8abSmrg   /* Don't pass this arg via a register if it would be split between
155863d1a8abSmrg      registers and memory.  */
1559*ec02198aSmrg   if (arg.type == NULL_TREE
156063d1a8abSmrg       && cum->nbytes + size > nregs * UNITS_PER_WORD)
156163d1a8abSmrg     return result;
156263d1a8abSmrg 
156363d1a8abSmrg   switch (cum->nbytes / UNITS_PER_WORD)
156463d1a8abSmrg     {
156563d1a8abSmrg     case 0:
1566*ec02198aSmrg       result = gen_rtx_REG (arg.mode, FIRST_ARGUMENT_REGNUM);
156763d1a8abSmrg       break;
156863d1a8abSmrg     case 1:
1569*ec02198aSmrg       result = gen_rtx_REG (arg.mode, FIRST_ARGUMENT_REGNUM + 1);
157063d1a8abSmrg       break;
157163d1a8abSmrg     default:
157263d1a8abSmrg       break;
157363d1a8abSmrg     }
157463d1a8abSmrg 
157563d1a8abSmrg   return result;
157663d1a8abSmrg }
157763d1a8abSmrg 
1578*ec02198aSmrg /* Update the data in CUM to advance over argument ARG.  */
157963d1a8abSmrg 
158063d1a8abSmrg static void
mn10300_function_arg_advance(cumulative_args_t cum_v,const function_arg_info & arg)1581*ec02198aSmrg mn10300_function_arg_advance (cumulative_args_t cum_v,
1582*ec02198aSmrg 			      const function_arg_info &arg)
158363d1a8abSmrg {
158463d1a8abSmrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
158563d1a8abSmrg 
1586*ec02198aSmrg   cum->nbytes += (arg.promoted_size_in_bytes () + 3) & ~3;
158763d1a8abSmrg }
158863d1a8abSmrg 
158963d1a8abSmrg /* Return the number of bytes of registers to use for an argument passed
159063d1a8abSmrg    partially in registers and partially in memory.  */
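
/* For example (illustrative numbers only): with one 4-byte argument
   already assigned to a register (cum->nbytes == 4), a typed 8-byte
   argument straddles the 8-byte register area, so this returns
   8 - 4 = 4 and the argument is passed half in a register and half in
   memory.  Untyped arguments that would straddle are instead pushed
   entirely, as the NULL_TREE check below shows.  */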
159163d1a8abSmrg 
159263d1a8abSmrg static int
mn10300_arg_partial_bytes(cumulative_args_t cum_v,const function_arg_info & arg)1593*ec02198aSmrg mn10300_arg_partial_bytes (cumulative_args_t cum_v,
1594*ec02198aSmrg 			   const function_arg_info &arg)
159563d1a8abSmrg {
159663d1a8abSmrg   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
159763d1a8abSmrg   int size;
159863d1a8abSmrg 
159963d1a8abSmrg   /* We only support using 2 data registers as argument registers.  */
160063d1a8abSmrg   int nregs = 2;
160163d1a8abSmrg 
160263d1a8abSmrg   /* Figure out the size of the object to be passed.  */
1603*ec02198aSmrg   size = arg.promoted_size_in_bytes ();
160463d1a8abSmrg   cum->nbytes = (cum->nbytes + 3) & ~3;
160563d1a8abSmrg 
160663d1a8abSmrg   /* Don't pass this arg via a register if all the argument registers
160763d1a8abSmrg      are used up.  */
160863d1a8abSmrg   if (cum->nbytes > nregs * UNITS_PER_WORD)
160963d1a8abSmrg     return 0;
161063d1a8abSmrg 
161163d1a8abSmrg   if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
161263d1a8abSmrg     return 0;
161363d1a8abSmrg 
161463d1a8abSmrg   /* Don't pass this arg via a register if it would be split between
161563d1a8abSmrg      registers and memory.  */
1616*ec02198aSmrg   if (arg.type == NULL_TREE
161763d1a8abSmrg       && cum->nbytes + size > nregs * UNITS_PER_WORD)
161863d1a8abSmrg     return 0;
161963d1a8abSmrg 
162063d1a8abSmrg   return nregs * UNITS_PER_WORD - cum->nbytes;
162163d1a8abSmrg }
162263d1a8abSmrg 
162363d1a8abSmrg /* Return the location of the function's value.  This will be either
162463d1a8abSmrg    $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
162563d1a8abSmrg    $d0 and $a0 if the -mreturn-pointer-on-d0 flag is set.  Note that
162663d1a8abSmrg    we only return the PARALLEL for outgoing values; we do not want
162763d1a8abSmrg    callers relying on this extra copy.  */
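
/* For the PARALLEL case the value built below is, schematically,

	(parallel [(expr_list (reg a0) (const_int 0))
		   (expr_list (reg d0) (const_int 0))])

   i.e. the pointer result is made available in both $a0 and $d0 (the
   register names assume the usual FIRST_ADDRESS_REGNUM and
   FIRST_DATA_REGNUM assignments).  */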
162863d1a8abSmrg 
162963d1a8abSmrg static rtx
mn10300_function_value(const_tree valtype,const_tree fn_decl_or_type ATTRIBUTE_UNUSED,bool outgoing)163063d1a8abSmrg mn10300_function_value (const_tree valtype,
163163d1a8abSmrg 			const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
163263d1a8abSmrg 			bool outgoing)
163363d1a8abSmrg {
163463d1a8abSmrg   rtx rv;
163563d1a8abSmrg   machine_mode mode = TYPE_MODE (valtype);
163663d1a8abSmrg 
163763d1a8abSmrg   if (! POINTER_TYPE_P (valtype))
163863d1a8abSmrg     return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
163963d1a8abSmrg   else if (! TARGET_PTR_A0D0 || ! outgoing
164063d1a8abSmrg 	   || cfun->returns_struct)
164163d1a8abSmrg     return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);
164263d1a8abSmrg 
164363d1a8abSmrg   rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
164463d1a8abSmrg   XVECEXP (rv, 0, 0)
164563d1a8abSmrg     = gen_rtx_EXPR_LIST (VOIDmode,
164663d1a8abSmrg 			 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
164763d1a8abSmrg 			 GEN_INT (0));
164863d1a8abSmrg 
164963d1a8abSmrg   XVECEXP (rv, 0, 1)
165063d1a8abSmrg     = gen_rtx_EXPR_LIST (VOIDmode,
165163d1a8abSmrg 			 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
165263d1a8abSmrg 			 GEN_INT (0));
165363d1a8abSmrg   return rv;
165463d1a8abSmrg }
165563d1a8abSmrg 
165663d1a8abSmrg /* Implements TARGET_LIBCALL_VALUE.  */
165763d1a8abSmrg 
165863d1a8abSmrg static rtx
mn10300_libcall_value(machine_mode mode,const_rtx fun ATTRIBUTE_UNUSED)165963d1a8abSmrg mn10300_libcall_value (machine_mode mode,
166063d1a8abSmrg 		       const_rtx fun ATTRIBUTE_UNUSED)
166163d1a8abSmrg {
166263d1a8abSmrg   return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
166363d1a8abSmrg }
166463d1a8abSmrg 
166563d1a8abSmrg /* Implements FUNCTION_VALUE_REGNO_P.  */
166663d1a8abSmrg 
166763d1a8abSmrg bool
mn10300_function_value_regno_p(const unsigned int regno)166863d1a8abSmrg mn10300_function_value_regno_p (const unsigned int regno)
166963d1a8abSmrg {
167063d1a8abSmrg  return (regno == FIRST_DATA_REGNUM || regno == FIRST_ADDRESS_REGNUM);
167163d1a8abSmrg }
167263d1a8abSmrg 
167363d1a8abSmrg /* Output an addition operation.  */
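
/* A summary of the templates chosen below, derived from the code rather
   than from a complete list of encodings: "inc %0" for adding 1 when
   the flags are not needed, "inc4 %0" for adding 4 to a non-data
   register, "add %2,%0" for other immediates, and for reg = reg + reg
   either the three-operand "add %2,%1,%0" (AM33) or a "mov" followed
   by a two-operand "add".  */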
167463d1a8abSmrg 
167563d1a8abSmrg const char *
mn10300_output_add(rtx operands[3],bool need_flags)167663d1a8abSmrg mn10300_output_add (rtx operands[3], bool need_flags)
167763d1a8abSmrg {
167863d1a8abSmrg   rtx dest, src1, src2;
167963d1a8abSmrg   unsigned int dest_regnum, src1_regnum, src2_regnum;
168063d1a8abSmrg   enum reg_class src1_class, src2_class, dest_class;
168163d1a8abSmrg 
168263d1a8abSmrg   dest = operands[0];
168363d1a8abSmrg   src1 = operands[1];
168463d1a8abSmrg   src2 = operands[2];
168563d1a8abSmrg 
168663d1a8abSmrg   dest_regnum = true_regnum (dest);
168763d1a8abSmrg   src1_regnum = true_regnum (src1);
168863d1a8abSmrg 
168963d1a8abSmrg   dest_class = REGNO_REG_CLASS (dest_regnum);
169063d1a8abSmrg   src1_class = REGNO_REG_CLASS (src1_regnum);
169163d1a8abSmrg 
169263d1a8abSmrg   if (CONST_INT_P (src2))
169363d1a8abSmrg     {
169463d1a8abSmrg       gcc_assert (dest_regnum == src1_regnum);
169563d1a8abSmrg 
169663d1a8abSmrg       if (src2 == const1_rtx && !need_flags)
169763d1a8abSmrg 	return "inc %0";
169863d1a8abSmrg       if (INTVAL (src2) == 4 && !need_flags && dest_class != DATA_REGS)
169963d1a8abSmrg         return "inc4 %0";
170063d1a8abSmrg 
170163d1a8abSmrg       gcc_assert (!need_flags || dest_class != SP_REGS);
170263d1a8abSmrg       return "add %2,%0";
170363d1a8abSmrg     }
170463d1a8abSmrg   else if (CONSTANT_P (src2))
170563d1a8abSmrg     return "add %2,%0";
170663d1a8abSmrg 
170763d1a8abSmrg   src2_regnum = true_regnum (src2);
170863d1a8abSmrg   src2_class = REGNO_REG_CLASS (src2_regnum);
170963d1a8abSmrg 
171063d1a8abSmrg   if (dest_regnum == src1_regnum)
171163d1a8abSmrg     return "add %2,%0";
171263d1a8abSmrg   if (dest_regnum == src2_regnum)
171363d1a8abSmrg     return "add %1,%0";
171463d1a8abSmrg 
171563d1a8abSmrg   /* The rest of the cases are reg = reg+reg.  For AM33, we can implement
171663d1a8abSmrg      this directly, as below, but when optimizing for space we can sometimes
171763d1a8abSmrg      do better by using a mov+add.  For MN103, we claimed that we could
171863d1a8abSmrg      implement a three-operand add because the various move and add insns
171963d1a8abSmrg      change sizes across register classes, and we can often do better than
172063d1a8abSmrg      reload in choosing which operand to move.  */
172163d1a8abSmrg   if (TARGET_AM33 && optimize_insn_for_speed_p ())
172263d1a8abSmrg     return "add %2,%1,%0";
172363d1a8abSmrg 
172463d1a8abSmrg   /* Catch cases where no extended register was used.  */
172563d1a8abSmrg   if (src1_class != EXTENDED_REGS
172663d1a8abSmrg       && src2_class != EXTENDED_REGS
172763d1a8abSmrg       && dest_class != EXTENDED_REGS)
172863d1a8abSmrg     {
172963d1a8abSmrg       /* We have to copy one of the sources into the destination, then
173063d1a8abSmrg          add the other source to the destination.
173163d1a8abSmrg 
173263d1a8abSmrg          Carefully select which source to copy to the destination; a
173363d1a8abSmrg          naive implementation will waste a byte when the source classes
173463d1a8abSmrg          are different and the destination is an address register.
173563d1a8abSmrg          Selecting the lowest cost register copy will optimize this
173663d1a8abSmrg          sequence.  */
173763d1a8abSmrg       if (src1_class == dest_class)
173863d1a8abSmrg         return "mov %1,%0\n\tadd %2,%0";
173963d1a8abSmrg       else
174063d1a8abSmrg 	return "mov %2,%0\n\tadd %1,%0";
174163d1a8abSmrg     }
174263d1a8abSmrg 
174363d1a8abSmrg   /* At least one register is an extended register.  */
174463d1a8abSmrg 
174563d1a8abSmrg   /* The three operand add instruction on the am33 is a win iff the
174663d1a8abSmrg      output register is an extended register, or if both source
174763d1a8abSmrg      registers are extended registers.  */
174863d1a8abSmrg   if (dest_class == EXTENDED_REGS || src1_class == src2_class)
174963d1a8abSmrg     return "add %2,%1,%0";
175063d1a8abSmrg 
175163d1a8abSmrg   /* It is better to copy one of the sources to the destination, then
175263d1a8abSmrg      perform a 2 address add.  The destination in this case must be
175363d1a8abSmrg      an address or data register and one of the sources must be an
175463d1a8abSmrg      extended register and the remaining source must not be an extended
175563d1a8abSmrg      register.
175663d1a8abSmrg 
175763d1a8abSmrg      The best code for this case is to copy the extended reg to the
175863d1a8abSmrg      destination, then emit a two address add.  */
175963d1a8abSmrg   if (src1_class == EXTENDED_REGS)
176063d1a8abSmrg     return "mov %1,%0\n\tadd %2,%0";
176163d1a8abSmrg   else
176263d1a8abSmrg     return "mov %2,%0\n\tadd %1,%0";
176363d1a8abSmrg }
176463d1a8abSmrg 
176563d1a8abSmrg /* Return 1 if X contains a symbolic expression.  We know these
176663d1a8abSmrg    expressions will have one of a few well defined forms, so
176763d1a8abSmrg    we need only check those forms.  */
176863d1a8abSmrg 
176963d1a8abSmrg int
mn10300_symbolic_operand(rtx op,machine_mode mode ATTRIBUTE_UNUSED)177063d1a8abSmrg mn10300_symbolic_operand (rtx op,
177163d1a8abSmrg 			  machine_mode mode ATTRIBUTE_UNUSED)
177263d1a8abSmrg {
177363d1a8abSmrg   switch (GET_CODE (op))
177463d1a8abSmrg     {
177563d1a8abSmrg     case SYMBOL_REF:
177663d1a8abSmrg     case LABEL_REF:
177763d1a8abSmrg       return 1;
177863d1a8abSmrg     case CONST:
177963d1a8abSmrg       op = XEXP (op, 0);
178063d1a8abSmrg       return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
178163d1a8abSmrg                || GET_CODE (XEXP (op, 0)) == LABEL_REF)
178263d1a8abSmrg               && CONST_INT_P (XEXP (op, 1)));
178363d1a8abSmrg     default:
178463d1a8abSmrg       return 0;
178563d1a8abSmrg     }
178663d1a8abSmrg }
178763d1a8abSmrg 
178863d1a8abSmrg /* Try machine dependent ways of modifying an illegitimate address
178963d1a8abSmrg    to be legitimate.  If we find one, return the new valid address.
179063d1a8abSmrg    This macro is used in only one place: `memory_address' in explow.c.
179163d1a8abSmrg 
179263d1a8abSmrg    OLDX is the address as it was before break_out_memory_refs was called.
179363d1a8abSmrg    In some cases it is useful to look at this to decide what needs to be done.
179463d1a8abSmrg 
179563d1a8abSmrg    Normally it is always safe for this macro to do nothing.  It exists to
179663d1a8abSmrg    recognize opportunities to optimize the output.
179763d1a8abSmrg 
179863d1a8abSmrg    But on a few ports with segmented architectures and indexed addressing
179963d1a8abSmrg    (mn10300, hppa) it is used to rewrite certain problematical addresses.  */
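
/* An illustrative example of the rewrite performed below (BASE, X and
   the constant are hypothetical): an address of the form

	(plus (reg BASE) (const (plus (symbol_ref X) (const_int -100000))))

   is recomputed so that the large offset is added to the base register
   first, giving roughly (plus (reg BASE_plus_offset) (reg X_addr)), so
   that the displacement never becomes part of an indexed memory
   address.  */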
180063d1a8abSmrg 
180163d1a8abSmrg static rtx
mn10300_legitimize_address(rtx x,rtx oldx ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED)180263d1a8abSmrg mn10300_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
180363d1a8abSmrg 			    machine_mode mode ATTRIBUTE_UNUSED)
180463d1a8abSmrg {
180563d1a8abSmrg   if (flag_pic && ! mn10300_legitimate_pic_operand_p (x))
180663d1a8abSmrg     x = mn10300_legitimize_pic_address (oldx, NULL_RTX);
180763d1a8abSmrg 
180863d1a8abSmrg   /* Uh-oh.  We might have an address for x[n-100000].  This needs
180963d1a8abSmrg      special handling to avoid creating an indexed memory address
181063d1a8abSmrg      with x-100000 as the base.  */
181163d1a8abSmrg   if (GET_CODE (x) == PLUS
181263d1a8abSmrg       && mn10300_symbolic_operand (XEXP (x, 1), VOIDmode))
181363d1a8abSmrg     {
181463d1a8abSmrg       /* Ugly.  We modify things here so that the address offset specified
181563d1a8abSmrg          by the index expression is computed first, then added to x to form
181663d1a8abSmrg          the entire address.  */
181763d1a8abSmrg 
181863d1a8abSmrg       rtx regx1, regy1, regy2, y;
181963d1a8abSmrg 
182063d1a8abSmrg       /* Strip off any CONST.  */
182163d1a8abSmrg       y = XEXP (x, 1);
182263d1a8abSmrg       if (GET_CODE (y) == CONST)
182363d1a8abSmrg         y = XEXP (y, 0);
182463d1a8abSmrg 
182563d1a8abSmrg       if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
182663d1a8abSmrg 	{
182763d1a8abSmrg 	  regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
182863d1a8abSmrg 	  regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
182963d1a8abSmrg 	  regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
183063d1a8abSmrg 	  regx1 = force_reg (Pmode,
183163d1a8abSmrg 			     gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1,
183263d1a8abSmrg 					     regy2));
183363d1a8abSmrg 	  return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
183463d1a8abSmrg 	}
183563d1a8abSmrg     }
183663d1a8abSmrg   return x;
183763d1a8abSmrg }
183863d1a8abSmrg 
183963d1a8abSmrg /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
184063d1a8abSmrg    @GOTOFF in `reg'.  */
184163d1a8abSmrg 
184263d1a8abSmrg rtx
mn10300_legitimize_pic_address(rtx orig,rtx reg)184363d1a8abSmrg mn10300_legitimize_pic_address (rtx orig, rtx reg)
184463d1a8abSmrg {
184563d1a8abSmrg   rtx x;
184663d1a8abSmrg   rtx_insn *insn;
184763d1a8abSmrg 
184863d1a8abSmrg   if (GET_CODE (orig) == LABEL_REF
184963d1a8abSmrg       || (GET_CODE (orig) == SYMBOL_REF
185063d1a8abSmrg 	  && (CONSTANT_POOL_ADDRESS_P (orig)
185163d1a8abSmrg 	      || ! MN10300_GLOBAL_P (orig))))
185263d1a8abSmrg     {
185363d1a8abSmrg       if (reg == NULL)
185463d1a8abSmrg 	reg = gen_reg_rtx (Pmode);
185563d1a8abSmrg 
185663d1a8abSmrg       x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOTOFF);
185763d1a8abSmrg       x = gen_rtx_CONST (SImode, x);
185863d1a8abSmrg       emit_move_insn (reg, x);
185963d1a8abSmrg 
186063d1a8abSmrg       insn = emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
186163d1a8abSmrg     }
186263d1a8abSmrg   else if (GET_CODE (orig) == SYMBOL_REF)
186363d1a8abSmrg     {
186463d1a8abSmrg       if (reg == NULL)
186563d1a8abSmrg 	reg = gen_reg_rtx (Pmode);
186663d1a8abSmrg 
186763d1a8abSmrg       x = gen_rtx_UNSPEC (SImode, gen_rtvec (1, orig), UNSPEC_GOT);
186863d1a8abSmrg       x = gen_rtx_CONST (SImode, x);
186963d1a8abSmrg       x = gen_rtx_PLUS (SImode, pic_offset_table_rtx, x);
187063d1a8abSmrg       x = gen_const_mem (SImode, x);
187163d1a8abSmrg 
187263d1a8abSmrg       insn = emit_move_insn (reg, x);
187363d1a8abSmrg     }
187463d1a8abSmrg   else
187563d1a8abSmrg     return orig;
187663d1a8abSmrg 
187763d1a8abSmrg   set_unique_reg_note (insn, REG_EQUAL, orig);
187863d1a8abSmrg   return reg;
187963d1a8abSmrg }
188063d1a8abSmrg 
188163d1a8abSmrg /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
188263d1a8abSmrg    isn't protected by a PIC unspec; nonzero otherwise.  */
188363d1a8abSmrg 
188463d1a8abSmrg int
mn10300_legitimate_pic_operand_p(rtx x)188563d1a8abSmrg mn10300_legitimate_pic_operand_p (rtx x)
188663d1a8abSmrg {
188763d1a8abSmrg   const char *fmt;
188863d1a8abSmrg   int i;
188963d1a8abSmrg 
189063d1a8abSmrg   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
189163d1a8abSmrg     return 0;
189263d1a8abSmrg 
189363d1a8abSmrg   if (GET_CODE (x) == UNSPEC
189463d1a8abSmrg       && (XINT (x, 1) == UNSPEC_PIC
189563d1a8abSmrg 	  || XINT (x, 1) == UNSPEC_GOT
189663d1a8abSmrg 	  || XINT (x, 1) == UNSPEC_GOTOFF
189763d1a8abSmrg 	  || XINT (x, 1) == UNSPEC_PLT
189863d1a8abSmrg 	  || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
189963d1a8abSmrg       return 1;
190063d1a8abSmrg 
190163d1a8abSmrg   fmt = GET_RTX_FORMAT (GET_CODE (x));
190263d1a8abSmrg   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
190363d1a8abSmrg     {
190463d1a8abSmrg       if (fmt[i] == 'E')
190563d1a8abSmrg 	{
190663d1a8abSmrg 	  int j;
190763d1a8abSmrg 
190863d1a8abSmrg 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
190963d1a8abSmrg 	    if (! mn10300_legitimate_pic_operand_p (XVECEXP (x, i, j)))
191063d1a8abSmrg 	      return 0;
191163d1a8abSmrg 	}
191263d1a8abSmrg       else if (fmt[i] == 'e'
191363d1a8abSmrg 	       && ! mn10300_legitimate_pic_operand_p (XEXP (x, i)))
191463d1a8abSmrg 	return 0;
191563d1a8abSmrg     }
191663d1a8abSmrg 
191763d1a8abSmrg   return 1;
191863d1a8abSmrg }
191963d1a8abSmrg 
192063d1a8abSmrg /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
192163d1a8abSmrg    legitimate, and FALSE otherwise.
192263d1a8abSmrg 
192363d1a8abSmrg    On the mn10300, the value in the address register must be
192463d1a8abSmrg    in the same memory space/segment as the effective address.
192563d1a8abSmrg 
192663d1a8abSmrg    This is problematical for reload since it does not understand
192763d1a8abSmrg    that base+index != index+base in a memory reference.
192863d1a8abSmrg 
192963d1a8abSmrg    Note it is still possible to use reg+reg addressing modes,
193063d1a8abSmrg    it's just much more difficult.  For a discussion of a possible
193163d1a8abSmrg    workaround and solution, see the comments in pa.c before the
193263d1a8abSmrg    function record_unscaled_index_insn_codes.  */
193363d1a8abSmrg 
193463d1a8abSmrg static bool
mn10300_legitimate_address_p(machine_mode mode,rtx x,bool strict)193563d1a8abSmrg mn10300_legitimate_address_p (machine_mode mode, rtx x, bool strict)
193663d1a8abSmrg {
193763d1a8abSmrg   rtx base, index;
193863d1a8abSmrg 
193963d1a8abSmrg   if (CONSTANT_ADDRESS_P (x))
194063d1a8abSmrg     return !flag_pic || mn10300_legitimate_pic_operand_p (x);
194163d1a8abSmrg 
194263d1a8abSmrg   if (RTX_OK_FOR_BASE_P (x, strict))
194363d1a8abSmrg     return true;
194463d1a8abSmrg 
194563d1a8abSmrg   if (TARGET_AM33 && (mode == SImode || mode == SFmode || mode == HImode))
194663d1a8abSmrg     {
194763d1a8abSmrg       if (GET_CODE (x) == POST_INC)
194863d1a8abSmrg 	return RTX_OK_FOR_BASE_P (XEXP (x, 0), strict);
194963d1a8abSmrg       if (GET_CODE (x) == POST_MODIFY)
195063d1a8abSmrg 	return (RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
195163d1a8abSmrg 		&& CONSTANT_ADDRESS_P (XEXP (x, 1)));
195263d1a8abSmrg     }
195363d1a8abSmrg 
195463d1a8abSmrg   if (GET_CODE (x) != PLUS)
195563d1a8abSmrg     return false;
195663d1a8abSmrg 
195763d1a8abSmrg   base = XEXP (x, 0);
195863d1a8abSmrg   index = XEXP (x, 1);
195963d1a8abSmrg 
196063d1a8abSmrg   if (!REG_P (base))
196163d1a8abSmrg     return false;
196263d1a8abSmrg   if (REG_P (index))
196363d1a8abSmrg     {
196463d1a8abSmrg       /* ??? Without AM33 generalized (Ri,Rn) addressing, reg+reg
196563d1a8abSmrg 	 addressing is hard to satisfy.  */
196663d1a8abSmrg       if (!TARGET_AM33)
196763d1a8abSmrg 	return false;
196863d1a8abSmrg 
196963d1a8abSmrg       return (REGNO_GENERAL_P (REGNO (base), strict)
197063d1a8abSmrg 	      && REGNO_GENERAL_P (REGNO (index), strict));
197163d1a8abSmrg     }
197263d1a8abSmrg 
197363d1a8abSmrg   if (!REGNO_STRICT_OK_FOR_BASE_P (REGNO (base), strict))
197463d1a8abSmrg     return false;
197563d1a8abSmrg 
197663d1a8abSmrg   if (CONST_INT_P (index))
197763d1a8abSmrg     return IN_RANGE (INTVAL (index), -1 - 0x7fffffff, 0x7fffffff);
197863d1a8abSmrg 
197963d1a8abSmrg   if (CONSTANT_ADDRESS_P (index))
198063d1a8abSmrg     return !flag_pic || mn10300_legitimate_pic_operand_p (index);
198163d1a8abSmrg 
198263d1a8abSmrg   return false;
198363d1a8abSmrg }
198463d1a8abSmrg 
198563d1a8abSmrg bool
mn10300_regno_in_class_p(unsigned regno,int rclass,bool strict)198663d1a8abSmrg mn10300_regno_in_class_p (unsigned regno, int rclass, bool strict)
198763d1a8abSmrg {
198863d1a8abSmrg   if (regno >= FIRST_PSEUDO_REGISTER)
198963d1a8abSmrg     {
199063d1a8abSmrg       if (!strict)
199163d1a8abSmrg 	return true;
199263d1a8abSmrg       if (!reg_renumber)
199363d1a8abSmrg 	return false;
199463d1a8abSmrg       regno = reg_renumber[regno];
199563d1a8abSmrg       if (regno == INVALID_REGNUM)
199663d1a8abSmrg 	return false;
199763d1a8abSmrg     }
199863d1a8abSmrg   return TEST_HARD_REG_BIT (reg_class_contents[rclass], regno);
199963d1a8abSmrg }
200063d1a8abSmrg 
200163d1a8abSmrg rtx
mn10300_legitimize_reload_address(rtx x,machine_mode mode ATTRIBUTE_UNUSED,int opnum,int type,int ind_levels ATTRIBUTE_UNUSED)200263d1a8abSmrg mn10300_legitimize_reload_address (rtx x,
200363d1a8abSmrg 				   machine_mode mode ATTRIBUTE_UNUSED,
200463d1a8abSmrg 				   int opnum, int type,
200563d1a8abSmrg 				   int ind_levels ATTRIBUTE_UNUSED)
200663d1a8abSmrg {
200763d1a8abSmrg   bool any_change = false;
200863d1a8abSmrg 
200963d1a8abSmrg   /* See above re disabling reg+reg addressing for MN103.  */
201063d1a8abSmrg   if (!TARGET_AM33)
201163d1a8abSmrg     return NULL_RTX;
201263d1a8abSmrg 
201363d1a8abSmrg   if (GET_CODE (x) != PLUS)
201463d1a8abSmrg     return NULL_RTX;
201563d1a8abSmrg 
201663d1a8abSmrg   if (XEXP (x, 0) == stack_pointer_rtx)
201763d1a8abSmrg     {
201863d1a8abSmrg       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
201963d1a8abSmrg 		   GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
202063d1a8abSmrg 		   opnum, (enum reload_type) type);
202163d1a8abSmrg       any_change = true;
202263d1a8abSmrg     }
202363d1a8abSmrg   if (XEXP (x, 1) == stack_pointer_rtx)
202463d1a8abSmrg     {
202563d1a8abSmrg       push_reload (XEXP (x, 1), NULL_RTX, &XEXP (x, 1), NULL,
202663d1a8abSmrg 		   GENERAL_REGS, GET_MODE (x), VOIDmode, 0, 0,
202763d1a8abSmrg 		   opnum, (enum reload_type) type);
202863d1a8abSmrg       any_change = true;
202963d1a8abSmrg     }
203063d1a8abSmrg 
203163d1a8abSmrg   return any_change ? x : NULL_RTX;
203263d1a8abSmrg }
203363d1a8abSmrg 
203463d1a8abSmrg /* Implement TARGET_LEGITIMATE_CONSTANT_P.  Returns TRUE if X is a valid
203563d1a8abSmrg    constant.  Note that some "constants" aren't valid, such as TLS
203663d1a8abSmrg    symbols and unconverted GOT-based references, so we eliminate
203763d1a8abSmrg    those here.  */
203863d1a8abSmrg 
203963d1a8abSmrg static bool
mn10300_legitimate_constant_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x)204063d1a8abSmrg mn10300_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
204163d1a8abSmrg {
204263d1a8abSmrg   switch (GET_CODE (x))
204363d1a8abSmrg     {
204463d1a8abSmrg     case CONST:
204563d1a8abSmrg       x = XEXP (x, 0);
204663d1a8abSmrg 
204763d1a8abSmrg       if (GET_CODE (x) == PLUS)
204863d1a8abSmrg 	{
204963d1a8abSmrg 	  if (! CONST_INT_P (XEXP (x, 1)))
205063d1a8abSmrg 	    return false;
205163d1a8abSmrg 	  x = XEXP (x, 0);
205263d1a8abSmrg 	}
205363d1a8abSmrg 
205463d1a8abSmrg       /* Only some unspecs are valid as "constants".  */
205563d1a8abSmrg       if (GET_CODE (x) == UNSPEC)
205663d1a8abSmrg 	{
205763d1a8abSmrg 	  switch (XINT (x, 1))
205863d1a8abSmrg 	    {
205963d1a8abSmrg 	    case UNSPEC_PIC:
206063d1a8abSmrg 	    case UNSPEC_GOT:
206163d1a8abSmrg 	    case UNSPEC_GOTOFF:
206263d1a8abSmrg 	    case UNSPEC_PLT:
206363d1a8abSmrg 	      return true;
206463d1a8abSmrg 	    default:
206563d1a8abSmrg 	      return false;
206663d1a8abSmrg 	    }
206763d1a8abSmrg 	}
206863d1a8abSmrg 
206963d1a8abSmrg       /* We must have drilled down to a symbol.  */
207063d1a8abSmrg       if (! mn10300_symbolic_operand (x, Pmode))
207163d1a8abSmrg 	return false;
207263d1a8abSmrg       break;
207363d1a8abSmrg 
207463d1a8abSmrg     default:
207563d1a8abSmrg       break;
207663d1a8abSmrg     }
207763d1a8abSmrg 
207863d1a8abSmrg   return true;
207963d1a8abSmrg }
208063d1a8abSmrg 
208163d1a8abSmrg /* Undo pic address legitimization for the benefit of debug info.  */
208263d1a8abSmrg 
208363d1a8abSmrg static rtx
mn10300_delegitimize_address(rtx orig_x)208463d1a8abSmrg mn10300_delegitimize_address (rtx orig_x)
208563d1a8abSmrg {
208663d1a8abSmrg   rtx x = orig_x, ret, addend = NULL;
208763d1a8abSmrg   bool need_mem;
208863d1a8abSmrg 
208963d1a8abSmrg   if (MEM_P (x))
209063d1a8abSmrg     x = XEXP (x, 0);
209163d1a8abSmrg   if (GET_CODE (x) != PLUS || GET_MODE (x) != Pmode)
209263d1a8abSmrg     return orig_x;
209363d1a8abSmrg 
209463d1a8abSmrg   if (XEXP (x, 0) == pic_offset_table_rtx)
209563d1a8abSmrg     ;
209663d1a8abSmrg   /* With the REG+REG addressing of AM33, var-tracking can re-assemble
209763d1a8abSmrg      some odd-looking "addresses" that were never valid in the first place.
209863d1a8abSmrg      We need to look harder to avoid warnings being emitted.  */
209963d1a8abSmrg   else if (GET_CODE (XEXP (x, 0)) == PLUS)
210063d1a8abSmrg     {
210163d1a8abSmrg       rtx x0 = XEXP (x, 0);
210263d1a8abSmrg       rtx x00 = XEXP (x0, 0);
210363d1a8abSmrg       rtx x01 = XEXP (x0, 1);
210463d1a8abSmrg 
210563d1a8abSmrg       if (x00 == pic_offset_table_rtx)
210663d1a8abSmrg 	addend = x01;
210763d1a8abSmrg       else if (x01 == pic_offset_table_rtx)
210863d1a8abSmrg 	addend = x00;
210963d1a8abSmrg       else
211063d1a8abSmrg 	return orig_x;
211163d1a8abSmrg 
211263d1a8abSmrg     }
211363d1a8abSmrg   else
211463d1a8abSmrg     return orig_x;
211563d1a8abSmrg   x = XEXP (x, 1);
211663d1a8abSmrg 
211763d1a8abSmrg   if (GET_CODE (x) != CONST)
211863d1a8abSmrg     return orig_x;
211963d1a8abSmrg   x = XEXP (x, 0);
212063d1a8abSmrg   if (GET_CODE (x) != UNSPEC)
212163d1a8abSmrg     return orig_x;
212263d1a8abSmrg 
212363d1a8abSmrg   ret = XVECEXP (x, 0, 0);
212463d1a8abSmrg   if (XINT (x, 1) == UNSPEC_GOTOFF)
212563d1a8abSmrg     need_mem = false;
212663d1a8abSmrg   else if (XINT (x, 1) == UNSPEC_GOT)
212763d1a8abSmrg     need_mem = true;
212863d1a8abSmrg   else
212963d1a8abSmrg     return orig_x;
213063d1a8abSmrg 
213163d1a8abSmrg   gcc_assert (GET_CODE (ret) == SYMBOL_REF);
213263d1a8abSmrg   if (need_mem != MEM_P (orig_x))
213363d1a8abSmrg     return orig_x;
213463d1a8abSmrg   if (need_mem && addend)
213563d1a8abSmrg     return orig_x;
213663d1a8abSmrg   if (addend)
213763d1a8abSmrg     ret = gen_rtx_PLUS (Pmode, addend, ret);
213863d1a8abSmrg   return ret;
213963d1a8abSmrg }
214063d1a8abSmrg 
214163d1a8abSmrg /* For addresses, costs are relative to "MOV (Rm),Rn".  For AM33 this is
214263d1a8abSmrg    the 3-byte fully general instruction; for MN103 this is the 2-byte form
214363d1a8abSmrg    with an address register.  */
214463d1a8abSmrg 
214563d1a8abSmrg static int
mn10300_address_cost(rtx x,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed)214663d1a8abSmrg mn10300_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
214763d1a8abSmrg 		      addr_space_t as ATTRIBUTE_UNUSED, bool speed)
214863d1a8abSmrg {
214963d1a8abSmrg   HOST_WIDE_INT i;
215063d1a8abSmrg   rtx base, index;
215163d1a8abSmrg 
215263d1a8abSmrg   switch (GET_CODE (x))
215363d1a8abSmrg     {
215463d1a8abSmrg     case CONST:
215563d1a8abSmrg     case SYMBOL_REF:
215663d1a8abSmrg     case LABEL_REF:
215763d1a8abSmrg       /* We assume all of these require a 32-bit constant, even though
215863d1a8abSmrg 	 some symbol and label references can be relaxed.  */
215963d1a8abSmrg       return speed ? 1 : 4;
216063d1a8abSmrg 
216163d1a8abSmrg     case REG:
216263d1a8abSmrg     case SUBREG:
216363d1a8abSmrg     case POST_INC:
216463d1a8abSmrg       return 0;
216563d1a8abSmrg 
216663d1a8abSmrg     case POST_MODIFY:
216763d1a8abSmrg       /* Assume any symbolic offset is a 32-bit constant.  */
216863d1a8abSmrg       i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
216963d1a8abSmrg       if (IN_RANGE (i, -128, 127))
217063d1a8abSmrg 	return speed ? 0 : 1;
217163d1a8abSmrg       if (speed)
217263d1a8abSmrg 	return 1;
217363d1a8abSmrg       if (IN_RANGE (i, -0x800000, 0x7fffff))
217463d1a8abSmrg 	return 3;
217563d1a8abSmrg       return 4;
217663d1a8abSmrg 
217763d1a8abSmrg     case PLUS:
217863d1a8abSmrg       base = XEXP (x, 0);
217963d1a8abSmrg       index = XEXP (x, 1);
218063d1a8abSmrg       if (register_operand (index, SImode))
218163d1a8abSmrg 	{
218263d1a8abSmrg 	  /* Attempt to minimize the number of registers in the address.
218363d1a8abSmrg 	     This is similar to what other ports do.  */
218463d1a8abSmrg 	  if (register_operand (base, SImode))
218563d1a8abSmrg 	    return 1;
218663d1a8abSmrg 
218763d1a8abSmrg 	  base = XEXP (x, 1);
218863d1a8abSmrg 	  index = XEXP (x, 0);
218963d1a8abSmrg 	}
219063d1a8abSmrg 
219163d1a8abSmrg       /* Assume any symbolic offset is a 32-bit constant.  */
219263d1a8abSmrg       i = (CONST_INT_P (XEXP (x, 1)) ? INTVAL (XEXP (x, 1)) : 0x12345678);
219363d1a8abSmrg       if (IN_RANGE (i, -128, 127))
219463d1a8abSmrg 	return speed ? 0 : 1;
219563d1a8abSmrg       if (IN_RANGE (i, -32768, 32767))
219663d1a8abSmrg 	return speed ? 0 : 2;
219763d1a8abSmrg       return speed ? 2 : 6;
219863d1a8abSmrg 
219963d1a8abSmrg     default:
220063d1a8abSmrg       return rtx_cost (x, Pmode, MEM, 0, speed);
220163d1a8abSmrg     }
220263d1a8abSmrg }
220363d1a8abSmrg 
220463d1a8abSmrg /* Implement the TARGET_REGISTER_MOVE_COST hook.
220563d1a8abSmrg 
220663d1a8abSmrg    Recall that the base value of 2 is required by assumptions elsewhere
220763d1a8abSmrg    in the body of the compiler, and that cost 2 is special-cased as an
220863d1a8abSmrg    early exit from reload meaning no work is required.  */
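
/* A couple of worked values, derived from the code below: when
   optimizing for size, a move between two address registers is a
   one-byte instruction and so costs 2 on the bytes * 2 scale, while a
   move that needs an intermediate register, such as SP_REGS to
   DATA_REGS on MN103, is costed as the sum of its two component
   moves.  */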
220963d1a8abSmrg 
221063d1a8abSmrg static int
mn10300_register_move_cost(machine_mode mode ATTRIBUTE_UNUSED,reg_class_t ifrom,reg_class_t ito)221163d1a8abSmrg mn10300_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
221263d1a8abSmrg 			    reg_class_t ifrom, reg_class_t ito)
221363d1a8abSmrg {
221463d1a8abSmrg   enum reg_class from = (enum reg_class) ifrom;
221563d1a8abSmrg   enum reg_class to = (enum reg_class) ito;
221663d1a8abSmrg   enum reg_class scratch, test;
221763d1a8abSmrg 
221863d1a8abSmrg   /* Simplify the following code by unifying the fp register classes.  */
221963d1a8abSmrg   if (to == FP_ACC_REGS)
222063d1a8abSmrg     to = FP_REGS;
222163d1a8abSmrg   if (from == FP_ACC_REGS)
222263d1a8abSmrg     from = FP_REGS;
222363d1a8abSmrg 
222463d1a8abSmrg   /* Diagnose invalid moves by costing them as two moves.  */
222563d1a8abSmrg 
222663d1a8abSmrg   scratch = NO_REGS;
222763d1a8abSmrg   test = from;
222863d1a8abSmrg   if (to == SP_REGS)
222963d1a8abSmrg     scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
223063d1a8abSmrg   else if (to == MDR_REGS)
223163d1a8abSmrg     scratch = DATA_REGS;
223263d1a8abSmrg   else if (to == FP_REGS && to != from)
223363d1a8abSmrg     scratch = GENERAL_REGS;
223463d1a8abSmrg   else
223563d1a8abSmrg     {
223663d1a8abSmrg       test = to;
223763d1a8abSmrg       if (from == SP_REGS)
223863d1a8abSmrg 	scratch = (TARGET_AM33 ? GENERAL_REGS : ADDRESS_REGS);
223963d1a8abSmrg       else if (from == MDR_REGS)
224063d1a8abSmrg 	scratch = DATA_REGS;
224163d1a8abSmrg       else if (from == FP_REGS && to != from)
224263d1a8abSmrg 	scratch = GENERAL_REGS;
224363d1a8abSmrg     }
224463d1a8abSmrg   if (scratch != NO_REGS && !reg_class_subset_p (test, scratch))
224563d1a8abSmrg     return (mn10300_register_move_cost (VOIDmode, from, scratch)
224663d1a8abSmrg 	    + mn10300_register_move_cost (VOIDmode, scratch, to));
224763d1a8abSmrg 
224863d1a8abSmrg   /* From here on, all we need consider are legal combinations.  */
224963d1a8abSmrg 
225063d1a8abSmrg   if (optimize_size)
225163d1a8abSmrg     {
225263d1a8abSmrg       /* The scale here is bytes * 2.  */
225363d1a8abSmrg 
225463d1a8abSmrg       if (from == to && (to == ADDRESS_REGS || to == DATA_REGS))
225563d1a8abSmrg 	return 2;
225663d1a8abSmrg 
225763d1a8abSmrg       if (from == SP_REGS)
225863d1a8abSmrg 	return (to == ADDRESS_REGS ? 2 : 6);
225963d1a8abSmrg 
226063d1a8abSmrg       /* For AM33, all remaining legal moves are two bytes.  */
226163d1a8abSmrg       if (TARGET_AM33)
226263d1a8abSmrg 	return 4;
226363d1a8abSmrg 
226463d1a8abSmrg       if (to == SP_REGS)
226563d1a8abSmrg 	return (from == ADDRESS_REGS ? 4 : 6);
226663d1a8abSmrg 
226763d1a8abSmrg       if ((from == ADDRESS_REGS || from == DATA_REGS)
226863d1a8abSmrg 	   && (to == ADDRESS_REGS || to == DATA_REGS))
226963d1a8abSmrg 	return 4;
227063d1a8abSmrg 
227163d1a8abSmrg       if (to == EXTENDED_REGS)
227263d1a8abSmrg 	return (to == from ? 6 : 4);
227363d1a8abSmrg 
227463d1a8abSmrg       /* What's left are SP_REGS, FP_REGS, or combinations of the above.  */
227563d1a8abSmrg       return 6;
227663d1a8abSmrg     }
227763d1a8abSmrg   else
227863d1a8abSmrg     {
227963d1a8abSmrg       /* The scale here is cycles * 2.  */
228063d1a8abSmrg 
228163d1a8abSmrg       if (to == FP_REGS)
228263d1a8abSmrg 	return 8;
228363d1a8abSmrg       if (from == FP_REGS)
228463d1a8abSmrg 	return 4;
228563d1a8abSmrg 
228663d1a8abSmrg       /* All legal moves between integral registers are single cycle.  */
228763d1a8abSmrg       return 2;
228863d1a8abSmrg     }
228963d1a8abSmrg }
229063d1a8abSmrg 
229163d1a8abSmrg /* Implement the TARGET_MEMORY_MOVE_COST hook.
229263d1a8abSmrg 
229363d1a8abSmrg    Since we are not given the form of the address, this cost must be
229463d1a8abSmrg    speed-relative; it should simply never be cheaper than the
229563d1a8abSmrg    size-relative register move costs above.  This is not a problem.  */
229663d1a8abSmrg 
229763d1a8abSmrg static int
mn10300_memory_move_cost(machine_mode mode ATTRIBUTE_UNUSED,reg_class_t iclass,bool in ATTRIBUTE_UNUSED)229863d1a8abSmrg mn10300_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
229963d1a8abSmrg 			  reg_class_t iclass, bool in ATTRIBUTE_UNUSED)
230063d1a8abSmrg {
230163d1a8abSmrg   enum reg_class rclass = (enum reg_class) iclass;
230263d1a8abSmrg 
230363d1a8abSmrg   if (rclass == FP_REGS)
230463d1a8abSmrg     return 8;
230563d1a8abSmrg   return 6;
230663d1a8abSmrg }
230763d1a8abSmrg 
230863d1a8abSmrg /* Implement the TARGET_RTX_COSTS hook.
230963d1a8abSmrg 
231063d1a8abSmrg    Speed-relative costs are relative to COSTS_N_INSNS, which is intended
231163d1a8abSmrg    to represent cycles.  Size-relative costs are in bytes.  */
231263d1a8abSmrg 
231363d1a8abSmrg static bool
mn10300_rtx_costs(rtx x,machine_mode mode,int outer_code,int opno ATTRIBUTE_UNUSED,int * ptotal,bool speed)231463d1a8abSmrg mn10300_rtx_costs (rtx x, machine_mode mode, int outer_code,
231563d1a8abSmrg 		   int opno ATTRIBUTE_UNUSED, int *ptotal, bool speed)
231663d1a8abSmrg {
231763d1a8abSmrg   /* This value is used for SYMBOL_REF etc where we want to pretend
231863d1a8abSmrg      we have a full 32-bit constant.  */
231963d1a8abSmrg   HOST_WIDE_INT i = 0x12345678;
232063d1a8abSmrg   int total;
232163d1a8abSmrg   int code = GET_CODE (x);
232263d1a8abSmrg 
232363d1a8abSmrg   switch (code)
232463d1a8abSmrg     {
232563d1a8abSmrg     case CONST_INT:
232663d1a8abSmrg       i = INTVAL (x);
232763d1a8abSmrg     do_int_costs:
232863d1a8abSmrg       if (speed)
232963d1a8abSmrg 	{
233063d1a8abSmrg 	  if (outer_code == SET)
233163d1a8abSmrg 	    {
233263d1a8abSmrg 	      /* 16-bit integer loads have latency 1, 32-bit loads 2.  */
233363d1a8abSmrg 	      if (IN_RANGE (i, -32768, 32767))
233463d1a8abSmrg 		total = COSTS_N_INSNS (1);
233563d1a8abSmrg 	      else
233663d1a8abSmrg 		total = COSTS_N_INSNS (2);
233763d1a8abSmrg 	    }
233863d1a8abSmrg 	  else
233963d1a8abSmrg 	    {
234063d1a8abSmrg 	      /* 16-bit integer operands don't affect latency;
234163d1a8abSmrg 		 24-bit and 32-bit operands add a cycle.  */
234263d1a8abSmrg 	      if (IN_RANGE (i, -32768, 32767))
234363d1a8abSmrg 		total = 0;
234463d1a8abSmrg 	      else
234563d1a8abSmrg 		total = COSTS_N_INSNS (1);
234663d1a8abSmrg 	    }
234763d1a8abSmrg 	}
234863d1a8abSmrg       else
234963d1a8abSmrg 	{
235063d1a8abSmrg 	  if (outer_code == SET)
235163d1a8abSmrg 	    {
235263d1a8abSmrg 	      if (i == 0)
235363d1a8abSmrg 		total = 1;
235463d1a8abSmrg 	      else if (IN_RANGE (i, -128, 127))
235563d1a8abSmrg 		total = 2;
235663d1a8abSmrg 	      else if (IN_RANGE (i, -32768, 32767))
235763d1a8abSmrg 		total = 3;
235863d1a8abSmrg 	      else
235963d1a8abSmrg 		total = 6;
236063d1a8abSmrg 	    }
236163d1a8abSmrg 	  else
236263d1a8abSmrg 	    {
236363d1a8abSmrg 	      /* Reference here is ADD An,Dn, vs ADD imm,Dn.  */
236463d1a8abSmrg 	      if (IN_RANGE (i, -128, 127))
236563d1a8abSmrg 		total = 0;
236663d1a8abSmrg 	      else if (IN_RANGE (i, -32768, 32767))
236763d1a8abSmrg 		total = 2;
236863d1a8abSmrg 	      else if (TARGET_AM33 && IN_RANGE (i, -0x01000000, 0x00ffffff))
236963d1a8abSmrg 		total = 3;
237063d1a8abSmrg 	      else
237163d1a8abSmrg 		total = 4;
237263d1a8abSmrg 	    }
237363d1a8abSmrg 	}
237463d1a8abSmrg       goto alldone;
237563d1a8abSmrg 
237663d1a8abSmrg     case CONST:
237763d1a8abSmrg     case LABEL_REF:
237863d1a8abSmrg     case SYMBOL_REF:
237963d1a8abSmrg     case CONST_DOUBLE:
238063d1a8abSmrg       /* We assume all of these require a 32-bit constant, even though
238163d1a8abSmrg 	 some symbol and label references can be relaxed.  */
238263d1a8abSmrg       goto do_int_costs;
238363d1a8abSmrg 
238463d1a8abSmrg     case UNSPEC:
238563d1a8abSmrg       switch (XINT (x, 1))
238663d1a8abSmrg 	{
238763d1a8abSmrg 	case UNSPEC_PIC:
238863d1a8abSmrg 	case UNSPEC_GOT:
238963d1a8abSmrg 	case UNSPEC_GOTOFF:
239063d1a8abSmrg 	case UNSPEC_PLT:
239163d1a8abSmrg 	case UNSPEC_GOTSYM_OFF:
239263d1a8abSmrg 	  /* The PIC unspecs also resolve to a 32-bit constant.  */
239363d1a8abSmrg 	  goto do_int_costs;
239463d1a8abSmrg 
239563d1a8abSmrg 	default:
239663d1a8abSmrg 	  /* Assume any non-listed unspec is some sort of arithmetic.  */
239763d1a8abSmrg 	  goto do_arith_costs;
239863d1a8abSmrg 	}
239963d1a8abSmrg 
240063d1a8abSmrg     case PLUS:
240163d1a8abSmrg       /* Notice the size difference of INC and INC4.  */
240263d1a8abSmrg       if (!speed && outer_code == SET && CONST_INT_P (XEXP (x, 1)))
240363d1a8abSmrg 	{
240463d1a8abSmrg 	  i = INTVAL (XEXP (x, 1));
240563d1a8abSmrg 	  if (i == 1 || i == 4)
240663d1a8abSmrg 	    {
240763d1a8abSmrg 	      total = 1 + rtx_cost (XEXP (x, 0), mode, PLUS, 0, speed);
240863d1a8abSmrg 	      goto alldone;
240963d1a8abSmrg 	    }
241063d1a8abSmrg 	}
241163d1a8abSmrg       goto do_arith_costs;
241263d1a8abSmrg 
241363d1a8abSmrg     case MINUS:
241463d1a8abSmrg     case AND:
241563d1a8abSmrg     case IOR:
241663d1a8abSmrg     case XOR:
241763d1a8abSmrg     case NOT:
241863d1a8abSmrg     case NEG:
241963d1a8abSmrg     case ZERO_EXTEND:
242063d1a8abSmrg     case SIGN_EXTEND:
242163d1a8abSmrg     case COMPARE:
242263d1a8abSmrg     case BSWAP:
242363d1a8abSmrg     case CLZ:
242463d1a8abSmrg     do_arith_costs:
242563d1a8abSmrg       total = (speed ? COSTS_N_INSNS (1) : 2);
242663d1a8abSmrg       break;
242763d1a8abSmrg 
242863d1a8abSmrg     case ASHIFT:
242963d1a8abSmrg       /* Notice the size difference of ASL2 and variants.  */
243063d1a8abSmrg       if (!speed && CONST_INT_P (XEXP (x, 1)))
243163d1a8abSmrg 	switch (INTVAL (XEXP (x, 1)))
243263d1a8abSmrg 	  {
243363d1a8abSmrg 	  case 1:
243463d1a8abSmrg 	  case 2:
243563d1a8abSmrg 	    total = 1;
243663d1a8abSmrg 	    goto alldone;
243763d1a8abSmrg 	  case 3:
243863d1a8abSmrg 	  case 4:
243963d1a8abSmrg 	    total = 2;
244063d1a8abSmrg 	    goto alldone;
244163d1a8abSmrg 	  }
244263d1a8abSmrg       /* FALLTHRU */
244363d1a8abSmrg 
244463d1a8abSmrg     case ASHIFTRT:
244563d1a8abSmrg     case LSHIFTRT:
244663d1a8abSmrg       total = (speed ? COSTS_N_INSNS (1) : 3);
244763d1a8abSmrg       goto alldone;
244863d1a8abSmrg 
244963d1a8abSmrg     case MULT:
245063d1a8abSmrg       total = (speed ? COSTS_N_INSNS (3) : 2);
245163d1a8abSmrg       break;
245263d1a8abSmrg 
245363d1a8abSmrg     case DIV:
245463d1a8abSmrg     case UDIV:
245563d1a8abSmrg     case MOD:
245663d1a8abSmrg     case UMOD:
245763d1a8abSmrg       total = (speed ? COSTS_N_INSNS (39)
245863d1a8abSmrg 		/* Include space to load+retrieve MDR.  */
245963d1a8abSmrg 		: code == MOD || code == UMOD ? 6 : 4);
246063d1a8abSmrg       break;
246163d1a8abSmrg 
246263d1a8abSmrg     case MEM:
246363d1a8abSmrg       total = mn10300_address_cost (XEXP (x, 0), mode,
246463d1a8abSmrg 				    MEM_ADDR_SPACE (x), speed);
246563d1a8abSmrg       if (speed)
246663d1a8abSmrg 	total = COSTS_N_INSNS (2 + total);
246763d1a8abSmrg       goto alldone;
246863d1a8abSmrg 
246963d1a8abSmrg     default:
247063d1a8abSmrg       /* Probably not implemented.  Assume external call.  */
247163d1a8abSmrg       total = (speed ? COSTS_N_INSNS (10) : 7);
247263d1a8abSmrg       break;
247363d1a8abSmrg     }
247463d1a8abSmrg 
247563d1a8abSmrg   *ptotal = total;
247663d1a8abSmrg   return false;
247763d1a8abSmrg 
247863d1a8abSmrg  alldone:
247963d1a8abSmrg   *ptotal = total;
248063d1a8abSmrg   return true;
248163d1a8abSmrg }
248263d1a8abSmrg 
248363d1a8abSmrg /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
248463d1a8abSmrg    may access it using GOTOFF instead of GOT.  */
248563d1a8abSmrg 
248663d1a8abSmrg static void
mn10300_encode_section_info(tree decl,rtx rtl,int first)248763d1a8abSmrg mn10300_encode_section_info (tree decl, rtx rtl, int first)
248863d1a8abSmrg {
248963d1a8abSmrg   rtx symbol;
249063d1a8abSmrg 
249163d1a8abSmrg   default_encode_section_info (decl, rtl, first);
249263d1a8abSmrg 
249363d1a8abSmrg   if (! MEM_P (rtl))
249463d1a8abSmrg     return;
249563d1a8abSmrg 
249663d1a8abSmrg   symbol = XEXP (rtl, 0);
249763d1a8abSmrg   if (GET_CODE (symbol) != SYMBOL_REF)
249863d1a8abSmrg     return;
249963d1a8abSmrg 
250063d1a8abSmrg   if (flag_pic)
250163d1a8abSmrg     SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
250263d1a8abSmrg }
250363d1a8abSmrg 
250463d1a8abSmrg /* Dispatch tables on the mn10300 are extremely expensive in terms of code
250563d1a8abSmrg    and readonly data size.  So we crank up the case threshold value to
250663d1a8abSmrg    encourage a series of if/else comparisons to implement many small switch
250763d1a8abSmrg    statements.  In theory, this value could be increased much more if we
250863d1a8abSmrg    were solely optimizing for space, but we keep it "reasonable" to avoid
250963d1a8abSmrg    serious code efficiency lossage.  */
251063d1a8abSmrg 
251163d1a8abSmrg static unsigned int
mn10300_case_values_threshold(void)251263d1a8abSmrg mn10300_case_values_threshold (void)
251363d1a8abSmrg {
251463d1a8abSmrg   return 6;
251563d1a8abSmrg }
251663d1a8abSmrg 
251763d1a8abSmrg /* Worker function for TARGET_TRAMPOLINE_INIT.  */
251863d1a8abSmrg 
251963d1a8abSmrg static void
mn10300_trampoline_init(rtx m_tramp,tree fndecl,rtx chain_value)252063d1a8abSmrg mn10300_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
252163d1a8abSmrg {
252263d1a8abSmrg   rtx mem, disp, fnaddr = XEXP (DECL_RTL (fndecl), 0);
252363d1a8abSmrg 
252463d1a8abSmrg   /* This is a strict alignment target, which means that we play
252563d1a8abSmrg      some games to make sure that the locations at which we need
252663d1a8abSmrg      to store <chain> and <disp> wind up at aligned addresses.
252763d1a8abSmrg 
252863d1a8abSmrg 	0x28 0x00			add 0,d0
252963d1a8abSmrg 	          0xfc 0xdd		mov chain,a1
253063d1a8abSmrg         <chain>
253163d1a8abSmrg 	0xf8 0xed 0x00			btst 0,d1
253263d1a8abSmrg 	               0xdc		jmp fnaddr
253363d1a8abSmrg 	<disp>
253463d1a8abSmrg 
253563d1a8abSmrg      Note that the two extra insns are effectively nops; they
253663d1a8abSmrg      clobber the flags but do not affect the contents of D0 or D1.  */
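
  /* For reference (the MN10300 is little-endian): the SImode constant
     0xddfc0028 stored at offset 0 below lays down the bytes 0x28 0x00
     0xfc 0xdd, i.e. the "add 0,d0" / "mov chain,a1" opcodes pictured
     above, and 0xdc00edf8 at offset 8 likewise encodes the
     "btst 0,d1" / "jmp fnaddr" pair.  */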
253763d1a8abSmrg 
253863d1a8abSmrg   disp = expand_binop (SImode, sub_optab, fnaddr,
253963d1a8abSmrg 		       plus_constant (Pmode, XEXP (m_tramp, 0), 11),
254063d1a8abSmrg 		       NULL_RTX, 1, OPTAB_DIRECT);
254163d1a8abSmrg 
254263d1a8abSmrg   mem = adjust_address (m_tramp, SImode, 0);
254363d1a8abSmrg   emit_move_insn (mem, gen_int_mode (0xddfc0028, SImode));
254463d1a8abSmrg   mem = adjust_address (m_tramp, SImode, 4);
254563d1a8abSmrg   emit_move_insn (mem, chain_value);
254663d1a8abSmrg   mem = adjust_address (m_tramp, SImode, 8);
254763d1a8abSmrg   emit_move_insn (mem, gen_int_mode (0xdc00edf8, SImode));
254863d1a8abSmrg   mem = adjust_address (m_tramp, SImode, 12);
254963d1a8abSmrg   emit_move_insn (mem, disp);
255063d1a8abSmrg }
255163d1a8abSmrg 
255263d1a8abSmrg /* Output the assembler code for a C++ thunk function.
255363d1a8abSmrg    THUNK_DECL is the declaration for the thunk function itself, FUNCTION
255463d1a8abSmrg    is the decl for the target function.  DELTA is an immediate constant
255563d1a8abSmrg    offset to be added to the THIS parameter.  If VCALL_OFFSET is nonzero
255663d1a8abSmrg    the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
255763d1a8abSmrg    additionally added to THIS.  Finally jump to the entry point of
255863d1a8abSmrg    FUNCTION.  */
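
/* For illustration, with DELTA = 4 and a nonzero VCALL_OFFSET of 8 the
   code below emits roughly the following (the register names depend on
   the argument-register assignment):

	add 4, <this>
	mov <this>, <scratch>
	mov (<scratch>), <scratch>
	add 8, <scratch>
	mov (<scratch>), <scratch>
	add <scratch>, <this>
	jmp <function>

   This is purely illustrative; see the fprintf calls for the exact
   text.  */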
255963d1a8abSmrg 
256063d1a8abSmrg static void
mn10300_asm_output_mi_thunk(FILE * file,tree thunk_fndecl ATTRIBUTE_UNUSED,HOST_WIDE_INT delta,HOST_WIDE_INT vcall_offset,tree function)256163d1a8abSmrg mn10300_asm_output_mi_thunk (FILE *        file,
256263d1a8abSmrg 			     tree          thunk_fndecl ATTRIBUTE_UNUSED,
256363d1a8abSmrg 			     HOST_WIDE_INT delta,
256463d1a8abSmrg 			     HOST_WIDE_INT vcall_offset,
256563d1a8abSmrg 			     tree          function)
256663d1a8abSmrg {
2567*ec02198aSmrg   const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
256863d1a8abSmrg   const char * _this;
256963d1a8abSmrg 
2570*ec02198aSmrg   assemble_start_function (thunk_fndecl, fnname);
257163d1a8abSmrg   /* Get the register holding the THIS parameter.  Handle the case
257263d1a8abSmrg      where there is a hidden first argument for a returned structure.  */
257363d1a8abSmrg   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
257463d1a8abSmrg     _this = reg_names [FIRST_ARGUMENT_REGNUM + 1];
257563d1a8abSmrg   else
257663d1a8abSmrg     _this = reg_names [FIRST_ARGUMENT_REGNUM];
257763d1a8abSmrg 
257863d1a8abSmrg   fprintf (file, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START);
257963d1a8abSmrg 
258063d1a8abSmrg   if (delta)
258163d1a8abSmrg     fprintf (file, "\tadd %d, %s\n", (int) delta, _this);
258263d1a8abSmrg 
258363d1a8abSmrg   if (vcall_offset)
258463d1a8abSmrg     {
258563d1a8abSmrg       const char * scratch = reg_names [FIRST_ADDRESS_REGNUM + 1];
258663d1a8abSmrg 
258763d1a8abSmrg       fprintf (file, "\tmov %s, %s\n", _this, scratch);
258863d1a8abSmrg       fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
258963d1a8abSmrg       fprintf (file, "\tadd %d, %s\n", (int) vcall_offset, scratch);
259063d1a8abSmrg       fprintf (file, "\tmov (%s), %s\n", scratch, scratch);
259163d1a8abSmrg       fprintf (file, "\tadd %s, %s\n", scratch, _this);
259263d1a8abSmrg     }
259363d1a8abSmrg 
259463d1a8abSmrg   fputs ("\tjmp ", file);
259563d1a8abSmrg   assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
259663d1a8abSmrg   putc ('\n', file);
2597*ec02198aSmrg   assemble_end_function (thunk_fndecl, fnname);
259863d1a8abSmrg }
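
/* As an illustration, for DELTA == 4 and VCALL_OFFSET == 8, and
   assuming THIS arrives in the first argument register (written here
   as d0) with a1 as the scratch address register, the function above
   emits approximately:

	# Thunk Entry Point:
	add 4, d0
	mov d0, a1
	mov (a1), a1
	add 8, a1
	mov (a1), a1
	add a1, d0
	jmp <target function>  */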
259963d1a8abSmrg 
260063d1a8abSmrg /* Return true if mn10300_output_mi_thunk would be able to output the
260163d1a8abSmrg    assembler code for the thunk function specified by the arguments
260263d1a8abSmrg    it is passed, and false otherwise.  */
260363d1a8abSmrg 
260463d1a8abSmrg static bool
260563d1a8abSmrg mn10300_can_output_mi_thunk (const_tree    thunk_fndecl ATTRIBUTE_UNUSED,
260663d1a8abSmrg 			     HOST_WIDE_INT delta        ATTRIBUTE_UNUSED,
260763d1a8abSmrg 			     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
260863d1a8abSmrg 			     const_tree    function     ATTRIBUTE_UNUSED)
260963d1a8abSmrg {
261063d1a8abSmrg   return true;
261163d1a8abSmrg }
261263d1a8abSmrg 
2613c7a68eb7Smrg /* Implement TARGET_HARD_REGNO_MODE_OK.  */
2614c7a68eb7Smrg 
2615c7a68eb7Smrg static bool
261663d1a8abSmrg mn10300_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
261763d1a8abSmrg {
261863d1a8abSmrg   if (REGNO_REG_CLASS (regno) == FP_REGS
261963d1a8abSmrg       || REGNO_REG_CLASS (regno) == FP_ACC_REGS)
262063d1a8abSmrg     /* Do not store integer values in FP registers.  */
262163d1a8abSmrg     return GET_MODE_CLASS (mode) == MODE_FLOAT && ((regno & 1) == 0);
262263d1a8abSmrg 
262363d1a8abSmrg   if (! TARGET_AM33 && REGNO_REG_CLASS (regno) == EXTENDED_REGS)
262463d1a8abSmrg     return false;
262563d1a8abSmrg 
262663d1a8abSmrg   if (((regno) & 1) == 0 || GET_MODE_SIZE (mode) == 4)
262763d1a8abSmrg     return true;
262863d1a8abSmrg 
262963d1a8abSmrg   if (REGNO_REG_CLASS (regno) == DATA_REGS
263063d1a8abSmrg       || (TARGET_AM33 && REGNO_REG_CLASS (regno) == ADDRESS_REGS)
263163d1a8abSmrg       || REGNO_REG_CLASS (regno) == EXTENDED_REGS)
263263d1a8abSmrg     return GET_MODE_SIZE (mode) <= 4;
263363d1a8abSmrg 
263463d1a8abSmrg   return false;
263563d1a8abSmrg }
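
/* Under these rules a 4-byte (SImode) value is acceptable in any data,
   address or extended register, an 8-byte value such as DImode is only
   acceptable starting at an even register number, and the FP registers
   only ever hold floating-point modes, again at even register numbers.
   On pre-AM33 parts the extended registers are rejected outright.  */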
263663d1a8abSmrg 
2637c7a68eb7Smrg /* Implement TARGET_MODES_TIEABLE_P.  */
2638c7a68eb7Smrg 
2639c7a68eb7Smrg static bool
2640c7a68eb7Smrg mn10300_modes_tieable_p (machine_mode mode1, machine_mode mode2)
264163d1a8abSmrg {
264263d1a8abSmrg   if (GET_MODE_CLASS (mode1) == MODE_FLOAT
264363d1a8abSmrg       && GET_MODE_CLASS (mode2) != MODE_FLOAT)
264463d1a8abSmrg     return false;
264563d1a8abSmrg 
264663d1a8abSmrg   if (GET_MODE_CLASS (mode2) == MODE_FLOAT
264763d1a8abSmrg       && GET_MODE_CLASS (mode1) != MODE_FLOAT)
264863d1a8abSmrg     return false;
264963d1a8abSmrg 
265063d1a8abSmrg   if (TARGET_AM33
265163d1a8abSmrg       || mode1 == mode2
265263d1a8abSmrg       || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4))
265363d1a8abSmrg     return true;
265463d1a8abSmrg 
265563d1a8abSmrg   return false;
265663d1a8abSmrg }
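
/* So, for example, QImode, HImode and SImode values may share a
   register (they are all 4 bytes or less), whereas SFmode cannot be
   tied to any integer mode because of the float/non-float checks above.
   On the AM33 any two modes are tieable provided they do not mix float
   and non-float.  */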
265763d1a8abSmrg 
265863d1a8abSmrg static int
265963d1a8abSmrg cc_flags_for_mode (machine_mode mode)
266063d1a8abSmrg {
266163d1a8abSmrg   switch (mode)
266263d1a8abSmrg     {
2663c7a68eb7Smrg     case E_CCmode:
266463d1a8abSmrg       return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_C | CC_FLAG_V;
2665c7a68eb7Smrg     case E_CCZNCmode:
266663d1a8abSmrg       return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_C;
2667c7a68eb7Smrg     case E_CCZNmode:
266863d1a8abSmrg       return CC_FLAG_Z | CC_FLAG_N;
2669c7a68eb7Smrg     case E_CC_FLOATmode:
267063d1a8abSmrg       return -1;
267163d1a8abSmrg     default:
267263d1a8abSmrg       gcc_unreachable ();
267363d1a8abSmrg     }
267463d1a8abSmrg }
267563d1a8abSmrg 
267663d1a8abSmrg static int
267763d1a8abSmrg cc_flags_for_code (enum rtx_code code)
267863d1a8abSmrg {
267963d1a8abSmrg   switch (code)
268063d1a8abSmrg     {
268163d1a8abSmrg     case EQ:	/* Z */
268263d1a8abSmrg     case NE:	/* ~Z */
268363d1a8abSmrg       return CC_FLAG_Z;
268463d1a8abSmrg 
268563d1a8abSmrg     case LT:	/* N */
268663d1a8abSmrg     case GE:	/* ~N */
268763d1a8abSmrg       return CC_FLAG_N;
268863d1a8abSmrg 
268963d1a8abSmrg     case GT:    /* ~(Z|(N^V)) */
269063d1a8abSmrg     case LE:    /* Z|(N^V) */
269163d1a8abSmrg       return CC_FLAG_Z | CC_FLAG_N | CC_FLAG_V;
269263d1a8abSmrg 
269363d1a8abSmrg     case GEU:	/* ~C */
269463d1a8abSmrg     case LTU:	/* C */
269563d1a8abSmrg       return CC_FLAG_C;
269663d1a8abSmrg 
269763d1a8abSmrg     case GTU:	/* ~(C | Z) */
269863d1a8abSmrg     case LEU:	/* C | Z */
269963d1a8abSmrg       return CC_FLAG_Z | CC_FLAG_C;
270063d1a8abSmrg 
270163d1a8abSmrg     case ORDERED:
270263d1a8abSmrg     case UNORDERED:
270363d1a8abSmrg     case LTGT:
270463d1a8abSmrg     case UNEQ:
270563d1a8abSmrg     case UNGE:
270663d1a8abSmrg     case UNGT:
270763d1a8abSmrg     case UNLE:
270863d1a8abSmrg     case UNLT:
270963d1a8abSmrg       return -1;
271063d1a8abSmrg 
271163d1a8abSmrg     default:
271263d1a8abSmrg       gcc_unreachable ();
271363d1a8abSmrg     }
271463d1a8abSmrg }
271563d1a8abSmrg 
271663d1a8abSmrg machine_mode
271763d1a8abSmrg mn10300_select_cc_mode (enum rtx_code code, rtx x, rtx y ATTRIBUTE_UNUSED)
271863d1a8abSmrg {
271963d1a8abSmrg   int req;
272063d1a8abSmrg 
272163d1a8abSmrg   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
272263d1a8abSmrg     return CC_FLOATmode;
272363d1a8abSmrg 
272463d1a8abSmrg   req = cc_flags_for_code (code);
272563d1a8abSmrg 
272663d1a8abSmrg   if (req & CC_FLAG_V)
272763d1a8abSmrg     return CCmode;
272863d1a8abSmrg   if (req & CC_FLAG_C)
272963d1a8abSmrg     return CCZNCmode;
273063d1a8abSmrg   return CCZNmode;
273163d1a8abSmrg }
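
/* Worked examples of the selection above: EQ, NE, LT and GE only need
   the Z and N flags, so they get CCZNmode; the unsigned comparisons
   GEU, LTU, GTU and LEU also need the carry flag and get CCZNCmode;
   the signed GT and LE tests need the overflow flag as well and
   therefore get full CCmode; any floating-point comparison gets
   CC_FLOATmode before the flag analysis is reached.  */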
273263d1a8abSmrg 
273363d1a8abSmrg static inline bool
273463d1a8abSmrg set_is_load_p (rtx set)
273563d1a8abSmrg {
273663d1a8abSmrg   return MEM_P (SET_SRC (set));
273763d1a8abSmrg }
273863d1a8abSmrg 
273963d1a8abSmrg static inline bool
274063d1a8abSmrg set_is_store_p (rtx set)
274163d1a8abSmrg {
274263d1a8abSmrg   return MEM_P (SET_DEST (set));
274363d1a8abSmrg }
274463d1a8abSmrg 
274563d1a8abSmrg /* Update scheduling costs for situations that cannot be
274663d1a8abSmrg    described using the attributes and DFA machinery.
274763d1a8abSmrg    DEP is the insn being scheduled.
274863d1a8abSmrg    INSN is the previous insn.
274963d1a8abSmrg    COST is the current cycle cost for DEP.  */
275063d1a8abSmrg 
275163d1a8abSmrg static int
275263d1a8abSmrg mn10300_adjust_sched_cost (rtx_insn *insn, int dep_type, rtx_insn *dep,
275363d1a8abSmrg 			   int cost, unsigned int)
275463d1a8abSmrg {
275563d1a8abSmrg   rtx insn_set;
275663d1a8abSmrg   rtx dep_set;
275763d1a8abSmrg   int timings;
275863d1a8abSmrg 
275963d1a8abSmrg   if (!TARGET_AM33)
276063d1a8abSmrg     return 1;
276163d1a8abSmrg 
276263d1a8abSmrg   /* We are only interested in pairs of SET. */
276363d1a8abSmrg   insn_set = single_set (insn);
276463d1a8abSmrg   if (!insn_set)
276563d1a8abSmrg     return cost;
276663d1a8abSmrg 
276763d1a8abSmrg   dep_set = single_set (dep);
276863d1a8abSmrg   if (!dep_set)
276963d1a8abSmrg     return cost;
277063d1a8abSmrg 
277163d1a8abSmrg   /* For the AM34 a load instruction that follows a
277263d1a8abSmrg      store instruction incurs an extra cycle of delay.  */
277363d1a8abSmrg   if (mn10300_tune_cpu == PROCESSOR_AM34
277463d1a8abSmrg       && set_is_load_p (dep_set)
277563d1a8abSmrg       && set_is_store_p (insn_set))
277663d1a8abSmrg     cost += 1;
277763d1a8abSmrg 
277863d1a8abSmrg   /* For the AM34 a non-store, non-branch FPU insn that follows
277963d1a8abSmrg      another FPU insn incurs a one cycle throughput increase.  */
278063d1a8abSmrg   else if (mn10300_tune_cpu == PROCESSOR_AM34
278163d1a8abSmrg       && ! set_is_store_p (insn_set)
278263d1a8abSmrg       && ! JUMP_P (insn)
278363d1a8abSmrg       && GET_MODE_CLASS (GET_MODE (SET_SRC (dep_set))) == MODE_FLOAT
278463d1a8abSmrg       && GET_MODE_CLASS (GET_MODE (SET_SRC (insn_set))) == MODE_FLOAT)
278563d1a8abSmrg     cost += 1;
278663d1a8abSmrg 
278763d1a8abSmrg   /*  Resolve the conflict described in section 1-7-4 of
278863d1a8abSmrg       Chapter 3 of the MN103E Series Instruction Manual
278963d1a8abSmrg       where it says:
279063d1a8abSmrg 
279163d1a8abSmrg         "When the preceding instruction is a CPU load or
279263d1a8abSmrg 	 store instruction, a following FPU instruction
279363d1a8abSmrg 	 cannot be executed until the CPU completes the
279463d1a8abSmrg 	 latency period even though there are no register
279563d1a8abSmrg 	 or flag dependencies between them."  */
279663d1a8abSmrg 
279763d1a8abSmrg   /* Only the AM33-2 (and later) CPUs have FPU instructions.  */
279863d1a8abSmrg   if (! TARGET_AM33_2)
279963d1a8abSmrg     return cost;
280063d1a8abSmrg 
280163d1a8abSmrg   /* If a data dependence already exists then the cost is correct.  */
280263d1a8abSmrg   if (dep_type == 0)
280363d1a8abSmrg     return cost;
280463d1a8abSmrg 
280563d1a8abSmrg   /* Check that the instruction about to be scheduled is an FPU instruction.  */
280663d1a8abSmrg   if (GET_MODE_CLASS (GET_MODE (SET_SRC (dep_set))) != MODE_FLOAT)
280763d1a8abSmrg     return cost;
280863d1a8abSmrg 
280963d1a8abSmrg   /* Now check to see if the previous instruction is a load or store.  */
281063d1a8abSmrg   if (! set_is_load_p (insn_set) && ! set_is_store_p (insn_set))
281163d1a8abSmrg     return cost;
281263d1a8abSmrg 
281363d1a8abSmrg   /* XXX: Verify: The text of 1-7-4 implies that the restriction
281463d1a8abSmrg      only applies when an INTEGER load/store precedes an FPU
281563d1a8abSmrg      instruction, but is this true ?  For now we assume that it is.  */
281663d1a8abSmrg   if (GET_MODE_CLASS (GET_MODE (SET_SRC (insn_set))) != MODE_INT)
281763d1a8abSmrg     return cost;
281863d1a8abSmrg 
281963d1a8abSmrg   /* Extract the latency value from the timings attribute.  */
282063d1a8abSmrg   timings = get_attr_timings (insn);
282163d1a8abSmrg   return timings < 100 ? (timings % 10) : (timings % 100);
282263d1a8abSmrg }
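
/* For example, a "timings" attribute value of 23 yields an adjusted
   cost (latency) of 3 cycles and a value of 211 yields 11 cycles: the
   low digit (or the low two digits of a three-digit value) holds the
   latency that replaces the default cost for the load/store followed
   by FPU-insn case described above.  */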
282363d1a8abSmrg 
282463d1a8abSmrg static void
282563d1a8abSmrg mn10300_conditional_register_usage (void)
282663d1a8abSmrg {
282763d1a8abSmrg   unsigned int i;
282863d1a8abSmrg 
282963d1a8abSmrg   if (!TARGET_AM33)
283063d1a8abSmrg     {
283163d1a8abSmrg       for (i = FIRST_EXTENDED_REGNUM;
283263d1a8abSmrg 	   i <= LAST_EXTENDED_REGNUM; i++)
2833*ec02198aSmrg 	fixed_regs[i] = 1;
283463d1a8abSmrg     }
283563d1a8abSmrg   if (!TARGET_AM33_2)
283663d1a8abSmrg     {
283763d1a8abSmrg       for (i = FIRST_FP_REGNUM;
283863d1a8abSmrg 	   i <= LAST_FP_REGNUM; i++)
2839*ec02198aSmrg 	fixed_regs[i] = 1;
284063d1a8abSmrg     }
284163d1a8abSmrg   if (flag_pic)
2842*ec02198aSmrg     fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
284363d1a8abSmrg }
284463d1a8abSmrg 
284563d1a8abSmrg /* Worker function for TARGET_MD_ASM_ADJUST.
284663d1a8abSmrg    We do this in the mn10300 backend to maintain source compatibility
284763d1a8abSmrg    with the old cc0-based compiler.  */
284863d1a8abSmrg 
284963d1a8abSmrg static rtx_insn *
285063d1a8abSmrg mn10300_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
285163d1a8abSmrg 		       vec<const char *> &/*constraints*/,
285263d1a8abSmrg 		       vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
285363d1a8abSmrg {
285463d1a8abSmrg   clobbers.safe_push (gen_rtx_REG (CCmode, CC_REG));
285563d1a8abSmrg   SET_HARD_REG_BIT (clobbered_regs, CC_REG);
285663d1a8abSmrg   return NULL;
285763d1a8abSmrg }
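
/* In effect every inline asm statement is treated as though the flags
   register were on its clobber list, i.e. as if the user had written

     asm ("..." : outputs : inputs : "cc");

   which matches what the old cc0-based compiler silently assumed.  */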
285863d1a8abSmrg 
285963d1a8abSmrg /* A helper function for splitting cbranch patterns after reload.  */
286063d1a8abSmrg 
286163d1a8abSmrg void
286263d1a8abSmrg mn10300_split_cbranch (machine_mode cmp_mode, rtx cmp_op, rtx label_ref)
286363d1a8abSmrg {
286463d1a8abSmrg   rtx flags, x;
286563d1a8abSmrg 
286663d1a8abSmrg   flags = gen_rtx_REG (cmp_mode, CC_REG);
286763d1a8abSmrg   x = gen_rtx_COMPARE (cmp_mode, XEXP (cmp_op, 0), XEXP (cmp_op, 1));
286863d1a8abSmrg   x = gen_rtx_SET (flags, x);
286963d1a8abSmrg   emit_insn (x);
287063d1a8abSmrg 
287163d1a8abSmrg   x = gen_rtx_fmt_ee (GET_CODE (cmp_op), VOIDmode, flags, const0_rtx);
287263d1a8abSmrg   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label_ref, pc_rtx);
287363d1a8abSmrg   x = gen_rtx_SET (pc_rtx, x);
287463d1a8abSmrg   emit_jump_insn (x);
287563d1a8abSmrg }
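
/* For instance, given CMP_MODE == CCZNCmode, CMP_OP == (ltu (reg:SI d0)
   (const_int 10)) and a LABEL_REF (register name purely illustrative),
   the helper emits

     (set (reg:CCZNC CC_REG) (compare:CCZNC (reg:SI d0) (const_int 10)))
     (set (pc) (if_then_else (ltu (reg:CCZNC CC_REG) (const_int 0))
			     (label_ref ...) (pc)))

   i.e. the comparison is separated from the conditional jump, with the
   comparison code re-applied to the flags register.  */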
287663d1a8abSmrg 
287763d1a8abSmrg /* A helper function for matching parallels that set the flags.  */
287863d1a8abSmrg 
287963d1a8abSmrg bool
288063d1a8abSmrg mn10300_match_ccmode (rtx insn, machine_mode cc_mode)
288163d1a8abSmrg {
288263d1a8abSmrg   rtx op1, flags;
288363d1a8abSmrg   machine_mode flags_mode;
288463d1a8abSmrg 
288563d1a8abSmrg   gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
288663d1a8abSmrg 
2887c7a68eb7Smrg   op1 = XVECEXP (PATTERN (insn), 0, 0);
288863d1a8abSmrg   gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
288963d1a8abSmrg 
289063d1a8abSmrg   flags = SET_DEST (op1);
289163d1a8abSmrg   flags_mode = GET_MODE (flags);
289263d1a8abSmrg 
289363d1a8abSmrg   if (GET_MODE (SET_SRC (op1)) != flags_mode)
289463d1a8abSmrg     return false;
289563d1a8abSmrg   if (GET_MODE_CLASS (flags_mode) != MODE_CC)
289663d1a8abSmrg     return false;
289763d1a8abSmrg 
289863d1a8abSmrg   /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
289963d1a8abSmrg   if (cc_flags_for_mode (flags_mode) & ~cc_flags_for_mode (cc_mode))
290063d1a8abSmrg     return false;
290163d1a8abSmrg 
290263d1a8abSmrg   return true;
290363d1a8abSmrg }
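
/* In other words, the flags written by the insn must be a subset of the
   flags tracked by CC_MODE: an insn whose compare is in CCZNmode will
   match a requested CCmode, but an insn computing full CCmode flags
   will not match a requested CCZNmode.  */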
290463d1a8abSmrg 
290563d1a8abSmrg /* This function is used to help split:
290663d1a8abSmrg 
290763d1a8abSmrg      (set (reg) (and (reg) (int)))
290863d1a8abSmrg 
290963d1a8abSmrg    into:
291063d1a8abSmrg 
291163d1a8abSmrg      (set (reg) (shift (reg) (int)))
291263d1a8abSmrg      (set (reg) (shift (reg) (int)))
291363d1a8abSmrg 
291463d1a8abSmrg    where the shifts will be shorter than the "and" insn.
291563d1a8abSmrg 
291663d1a8abSmrg    It returns the number of bits that should be shifted.  A positive
291763d1a8abSmrg    value means that the low bits are to be cleared (and hence the
291863d1a8abSmrg    shifts should be right followed by left) whereas a negative value
291963d1a8abSmrg    means that the high bits are to be cleared (left followed by right).
292063d1a8abSmrg    Zero is returned when it would not be economical to split the AND.  */
292163d1a8abSmrg 
292263d1a8abSmrg int
292363d1a8abSmrg mn10300_split_and_operand_count (rtx op)
292463d1a8abSmrg {
292563d1a8abSmrg   HOST_WIDE_INT val = INTVAL (op);
292663d1a8abSmrg   int count;
292763d1a8abSmrg 
292863d1a8abSmrg   if (val < 0)
292963d1a8abSmrg     {
293063d1a8abSmrg       /* High bit is set, look for bits clear at the bottom.  */
293163d1a8abSmrg       count = exact_log2 (-val);
293263d1a8abSmrg       if (count < 0)
293363d1a8abSmrg 	return 0;
293463d1a8abSmrg       /* This is only a size win if we can use the asl2 insn.  Otherwise we
293563d1a8abSmrg 	 would be replacing 1 6-byte insn with 2 3-byte insns.  */
293663d1a8abSmrg       if (count > (optimize_insn_for_speed_p () ? 2 : 4))
293763d1a8abSmrg 	return 0;
293863d1a8abSmrg       return count;
293963d1a8abSmrg     }
294063d1a8abSmrg   else
294163d1a8abSmrg     {
294263d1a8abSmrg       /* High bit is clear, look for bits set at the bottom.  */
294363d1a8abSmrg       count = exact_log2 (val + 1);
294463d1a8abSmrg       count = 32 - count;
294563d1a8abSmrg       /* Again, this is only a size win with asl2.  */
294663d1a8abSmrg       if (count > (optimize_insn_for_speed_p () ? 2 : 4))
294763d1a8abSmrg 	return 0;
294863d1a8abSmrg       return -count;
294963d1a8abSmrg     }
295063d1a8abSmrg }
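
/* Two worked examples: for OP == (const_int -4) (a mask clearing the
   low two bits) -VAL is 4, exact_log2 gives 2 and 2 is returned, so the
   AND becomes a shift right by 2 followed by a shift left by 2; for
   OP == (const_int 0x3fffffff) (clearing the top two bits) VAL + 1 is
   0x40000000, giving 32 - 30 = 2, and -2 is returned for a left shift
   followed by a right shift.  Masks that would need more shifting than
   the limits above simply return 0 and the AND is kept.  */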
295163d1a8abSmrg 
295263d1a8abSmrg struct liw_data
295363d1a8abSmrg {
295463d1a8abSmrg   enum attr_liw slot;
295563d1a8abSmrg   enum attr_liw_op op;
295663d1a8abSmrg   rtx dest;
295763d1a8abSmrg   rtx src;
295863d1a8abSmrg };
295963d1a8abSmrg 
296063d1a8abSmrg /* Decide if the given insn is a candidate for LIW bundling.  If it is then
296163d1a8abSmrg    extract the operands and LIW attributes from the insn and use them to fill
296263d1a8abSmrg    in the liw_data structure.  Return true upon success or false if the insn
296363d1a8abSmrg    cannot be bundled.  */
296463d1a8abSmrg 
296563d1a8abSmrg static bool
296663d1a8abSmrg extract_bundle (rtx_insn *insn, struct liw_data * pdata)
296763d1a8abSmrg {
296863d1a8abSmrg   bool allow_consts = true;
296963d1a8abSmrg   rtx p;
297063d1a8abSmrg 
297163d1a8abSmrg   gcc_assert (pdata != NULL);
297263d1a8abSmrg 
297363d1a8abSmrg   if (insn == NULL)
297463d1a8abSmrg     return false;
297563d1a8abSmrg   /* Make sure that we are dealing with a simple SET insn.  */
297663d1a8abSmrg   p = single_set (insn);
297763d1a8abSmrg   if (p == NULL_RTX)
297863d1a8abSmrg     return false;
297963d1a8abSmrg 
298063d1a8abSmrg   /* Make sure that it could go into one of the LIW pipelines.  */
298163d1a8abSmrg   pdata->slot = get_attr_liw (insn);
298263d1a8abSmrg   if (pdata->slot == LIW_BOTH)
298363d1a8abSmrg     return false;
298463d1a8abSmrg 
298563d1a8abSmrg   pdata->op = get_attr_liw_op (insn);
298663d1a8abSmrg 
298763d1a8abSmrg   switch (pdata->op)
298863d1a8abSmrg     {
298963d1a8abSmrg     case LIW_OP_MOV:
299063d1a8abSmrg       pdata->dest = SET_DEST (p);
299163d1a8abSmrg       pdata->src = SET_SRC (p);
299263d1a8abSmrg       break;
299363d1a8abSmrg     case LIW_OP_CMP:
299463d1a8abSmrg       pdata->dest = XEXP (SET_SRC (p), 0);
299563d1a8abSmrg       pdata->src = XEXP (SET_SRC (p), 1);
299663d1a8abSmrg       break;
299763d1a8abSmrg     case LIW_OP_NONE:
299863d1a8abSmrg       return false;
299963d1a8abSmrg     case LIW_OP_AND:
300063d1a8abSmrg     case LIW_OP_OR:
300163d1a8abSmrg     case LIW_OP_XOR:
300263d1a8abSmrg       /* The AND, OR and XOR long instruction words only accept register arguments.  */
300363d1a8abSmrg       allow_consts = false;
300463d1a8abSmrg       /* Fall through.  */
300563d1a8abSmrg     default:
300663d1a8abSmrg       pdata->dest = SET_DEST (p);
300763d1a8abSmrg       pdata->src = XEXP (SET_SRC (p), 1);
300863d1a8abSmrg       break;
300963d1a8abSmrg     }
301063d1a8abSmrg 
301163d1a8abSmrg   if (! REG_P (pdata->dest))
301263d1a8abSmrg     return false;
301363d1a8abSmrg 
301463d1a8abSmrg   if (REG_P (pdata->src))
301563d1a8abSmrg     return true;
301663d1a8abSmrg 
301763d1a8abSmrg   return allow_consts && satisfies_constraint_O (pdata->src);
301863d1a8abSmrg }
301963d1a8abSmrg 
302063d1a8abSmrg /* Make sure that it is OK to execute LIW1 and LIW2 in parallel.  GCC generated
302163d1a8abSmrg    the instructions with the assumption that LIW1 would be executed before LIW2
302263d1a8abSmrg    so we must check for overlaps between their sources and destinations.  */
302363d1a8abSmrg 
302463d1a8abSmrg static bool
302563d1a8abSmrg check_liw_constraints (struct liw_data * pliw1, struct liw_data * pliw2)
302663d1a8abSmrg {
302763d1a8abSmrg   /* Check for slot conflicts.  */
302863d1a8abSmrg   if (pliw2->slot == pliw1->slot && pliw1->slot != LIW_EITHER)
302963d1a8abSmrg     return false;
303063d1a8abSmrg 
303163d1a8abSmrg   /* If either operation is a compare, then "dest" is really an input; the real
303263d1a8abSmrg      destination is CC_REG.  So these instructions need different checks.  */
303363d1a8abSmrg 
303463d1a8abSmrg   /* Changing "CMP ; OP" into "CMP | OP" is OK because the comparison will
303563d1a8abSmrg      check its values prior to any changes made by OP.  */
303663d1a8abSmrg   if (pliw1->op == LIW_OP_CMP)
303763d1a8abSmrg     {
303863d1a8abSmrg       /* Two sequential comparisons mean dead code, which ought to
303963d1a8abSmrg          have been eliminated given that bundling only happens with
304063d1a8abSmrg          optimization.  We cannot bundle them in any case.  */
304163d1a8abSmrg       gcc_assert (pliw1->op != pliw2->op);
304263d1a8abSmrg       return true;
304363d1a8abSmrg     }
304463d1a8abSmrg 
304563d1a8abSmrg   /* Changing "OP ; CMP" into "OP | CMP" does not work if the value being compared
304663d1a8abSmrg      is the destination of OP, as the CMP will look at the old value, not the new
304763d1a8abSmrg      one.  */
304863d1a8abSmrg   if (pliw2->op == LIW_OP_CMP)
304963d1a8abSmrg     {
305063d1a8abSmrg       if (REGNO (pliw2->dest) == REGNO (pliw1->dest))
305163d1a8abSmrg 	return false;
305263d1a8abSmrg 
305363d1a8abSmrg       if (REG_P (pliw2->src))
305463d1a8abSmrg 	return REGNO (pliw2->src) != REGNO (pliw1->dest);
305563d1a8abSmrg 
305663d1a8abSmrg       return true;
305763d1a8abSmrg     }
305863d1a8abSmrg 
305963d1a8abSmrg   /* Changing "OP1 ; OP2" into "OP1 | OP2" does not work if they both write to the
306063d1a8abSmrg      same destination register.  */
306163d1a8abSmrg   if (REGNO (pliw2->dest) == REGNO (pliw1->dest))
306263d1a8abSmrg     return false;
306363d1a8abSmrg 
306463d1a8abSmrg   /* Changing "OP1 ; OP2" into "OP1 | OP2" generally does not work if the destination
306563d1a8abSmrg      of OP1 is the source of OP2.  The exception is when OP1 is a MOVE instruction,
306663d1a8abSmrg      in which case we can replace the source in OP2 with the source of OP1.  */
306763d1a8abSmrg   if (REG_P (pliw2->src) && REGNO (pliw2->src) == REGNO (pliw1->dest))
306863d1a8abSmrg     {
306963d1a8abSmrg       if (pliw1->op == LIW_OP_MOV && REG_P (pliw1->src))
307063d1a8abSmrg 	{
307163d1a8abSmrg 	  if (! REG_P (pliw1->src)
307263d1a8abSmrg 	      && (pliw2->op == LIW_OP_AND
307363d1a8abSmrg 		  || pliw2->op == LIW_OP_OR
307463d1a8abSmrg 		  || pliw2->op == LIW_OP_XOR))
307563d1a8abSmrg 	    return false;
307663d1a8abSmrg 
307763d1a8abSmrg 	  pliw2->src = pliw1->src;
307863d1a8abSmrg 	  return true;
307963d1a8abSmrg 	}
308063d1a8abSmrg       return false;
308163d1a8abSmrg     }
308263d1a8abSmrg 
308363d1a8abSmrg   /* Everything else is OK.  */
308463d1a8abSmrg   return true;
308563d1a8abSmrg }
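
/* A couple of concrete cases of the rules above (register names purely
   illustrative): a move that writes d1 may not be bundled with a
   following compare that reads d1, since the compare would then see the
   stale value; but a compare followed by an operation that overwrites
   one of the compared registers is fine, because the compare samples
   its operands first.  Likewise "mov d0,d1" followed by an operation
   that reads d1 can be bundled after rewriting that operand, so the
   second operation reads d0 directly.  */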
308663d1a8abSmrg 
308763d1a8abSmrg /* Combine pairs of insns into LIW bundles.  */
308863d1a8abSmrg 
308963d1a8abSmrg static void
309063d1a8abSmrg mn10300_bundle_liw (void)
309163d1a8abSmrg {
309263d1a8abSmrg   rtx_insn *r;
309363d1a8abSmrg 
309463d1a8abSmrg   for (r = get_insns (); r != NULL; r = next_nonnote_nondebug_insn (r))
309563d1a8abSmrg     {
309663d1a8abSmrg       rtx_insn *insn1, *insn2;
309763d1a8abSmrg       struct liw_data liw1, liw2;
309863d1a8abSmrg 
309963d1a8abSmrg       insn1 = r;
310063d1a8abSmrg       if (! extract_bundle (insn1, & liw1))
310163d1a8abSmrg 	continue;
310263d1a8abSmrg 
310363d1a8abSmrg       insn2 = next_nonnote_nondebug_insn (insn1);
310463d1a8abSmrg       if (! extract_bundle (insn2, & liw2))
310563d1a8abSmrg 	continue;
310663d1a8abSmrg 
310763d1a8abSmrg       /* Check for source/destination overlap.  */
310863d1a8abSmrg       if (! check_liw_constraints (& liw1, & liw2))
310963d1a8abSmrg 	continue;
311063d1a8abSmrg 
311163d1a8abSmrg       if (liw1.slot == LIW_OP2 || liw2.slot == LIW_OP1)
311263d1a8abSmrg 	{
311363d1a8abSmrg 	  struct liw_data temp;
311463d1a8abSmrg 
311563d1a8abSmrg 	  temp = liw1;
311663d1a8abSmrg 	  liw1 = liw2;
311763d1a8abSmrg 	  liw2 = temp;
311863d1a8abSmrg 	}
311963d1a8abSmrg 
312063d1a8abSmrg       delete_insn (insn2);
312163d1a8abSmrg 
312263d1a8abSmrg       rtx insn2_pat;
312363d1a8abSmrg       if (liw1.op == LIW_OP_CMP)
312463d1a8abSmrg 	insn2_pat = gen_cmp_liw (liw2.dest, liw2.src, liw1.dest, liw1.src,
312563d1a8abSmrg 				 GEN_INT (liw2.op));
312663d1a8abSmrg       else if (liw2.op == LIW_OP_CMP)
312763d1a8abSmrg 	insn2_pat = gen_liw_cmp (liw1.dest, liw1.src, liw2.dest, liw2.src,
312863d1a8abSmrg 				 GEN_INT (liw1.op));
312963d1a8abSmrg       else
313063d1a8abSmrg 	insn2_pat = gen_liw (liw1.dest, liw2.dest, liw1.src, liw2.src,
313163d1a8abSmrg 			     GEN_INT (liw1.op), GEN_INT (liw2.op));
313263d1a8abSmrg 
313363d1a8abSmrg       insn2 = emit_insn_after (insn2_pat, insn1);
313463d1a8abSmrg       delete_insn (insn1);
313563d1a8abSmrg       r = insn2;
313663d1a8abSmrg     }
313763d1a8abSmrg }
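
/* The overall effect is that two adjacent, independent single-SET insns
   are deleted and re-emitted as one of the liw, cmp_liw or liw_cmp
   patterns, which the output templates then print as a single long
   instruction word combining both operations.  The pair is swapped
   first, if necessary, so that each operation lands in a pipeline slot
   it is allowed to use.  */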
313863d1a8abSmrg 
313963d1a8abSmrg #define DUMP(reason, insn)			\
314063d1a8abSmrg   do						\
314163d1a8abSmrg     {						\
314263d1a8abSmrg       if (dump_file)				\
314363d1a8abSmrg 	{					\
314463d1a8abSmrg 	  fprintf (dump_file, reason "\n");	\
314563d1a8abSmrg 	  if (insn != NULL_RTX)			\
314663d1a8abSmrg 	    print_rtl_single (dump_file, insn);	\
314763d1a8abSmrg 	  fprintf(dump_file, "\n");		\
314863d1a8abSmrg 	}					\
314963d1a8abSmrg     }						\
315063d1a8abSmrg   while (0)
315163d1a8abSmrg 
315263d1a8abSmrg /* Replace the BRANCH insn with a Lcc insn that goes to LABEL.
315363d1a8abSmrg    Insert a SETLB insn just before LABEL.  */
315463d1a8abSmrg 
315563d1a8abSmrg static void
315663d1a8abSmrg mn10300_insert_setlb_lcc (rtx_insn *label, rtx_insn *branch)
315763d1a8abSmrg {
315863d1a8abSmrg   rtx lcc, comparison, cmp_reg;
315963d1a8abSmrg 
316063d1a8abSmrg   if (LABEL_NUSES (label) > 1)
316163d1a8abSmrg     {
316263d1a8abSmrg       rtx_insn *insn;
316363d1a8abSmrg 
316463d1a8abSmrg       /* This label is used both as an entry point to the loop
316563d1a8abSmrg 	 and as a loop-back point for the loop.  We need to separate
316663d1a8abSmrg 	 these two functions so that the SETLB happens upon entry,
316763d1a8abSmrg 	 but the loop-back does not go to the SETLB instruction.  */
316863d1a8abSmrg       DUMP ("Inserting SETLB insn after:", label);
316963d1a8abSmrg       insn = emit_insn_after (gen_setlb (), label);
317063d1a8abSmrg       label = gen_label_rtx ();
317163d1a8abSmrg       emit_label_after (label, insn);
317263d1a8abSmrg       DUMP ("Created new loop-back label:", label);
317363d1a8abSmrg     }
317463d1a8abSmrg   else
317563d1a8abSmrg     {
317663d1a8abSmrg       DUMP ("Inserting SETLB insn before:", label);
317763d1a8abSmrg       emit_insn_before (gen_setlb (), label);
317863d1a8abSmrg     }
317963d1a8abSmrg 
318063d1a8abSmrg   comparison = XEXP (SET_SRC (PATTERN (branch)), 0);
318163d1a8abSmrg   cmp_reg = XEXP (comparison, 0);
318263d1a8abSmrg   gcc_assert (REG_P (cmp_reg));
318363d1a8abSmrg 
318463d1a8abSmrg   /* If the comparison has not already been split out of the branch
318563d1a8abSmrg      then do so now.  */
318663d1a8abSmrg   gcc_assert (REGNO (cmp_reg) == CC_REG);
318763d1a8abSmrg 
318863d1a8abSmrg   if (GET_MODE (cmp_reg) == CC_FLOATmode)
318963d1a8abSmrg     lcc = gen_FLcc (comparison, label);
319063d1a8abSmrg   else
319163d1a8abSmrg     lcc = gen_Lcc (comparison, label);
319263d1a8abSmrg 
319363d1a8abSmrg   rtx_insn *jump = emit_jump_insn_before (lcc, branch);
319463d1a8abSmrg   mark_jump_label (XVECEXP (lcc, 0, 0), jump, 0);
319563d1a8abSmrg   JUMP_LABEL (jump) = label;
319663d1a8abSmrg   DUMP ("Replacing branch insn...", branch);
319763d1a8abSmrg   DUMP ("... with Lcc insn:", jump);
319863d1a8abSmrg   delete_insn (branch);
319963d1a8abSmrg }
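
/* In the usual case, where the loop header label is only used by the
   loop-back branch, the transformation above turns

	L:	...loop body...
		(conditional branch back to L)

   into

		setlb
	L:	...loop body...
		Lcc L

   so that the loop-back branch can be implemented with the AM33's
   SETLB/Lcc loop instructions.  When the label is also an entry point
   from outside the loop, a new loop-back label is inserted after the
   SETLB instead, as handled above.  */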
320063d1a8abSmrg 
320163d1a8abSmrg static bool
320263d1a8abSmrg mn10300_block_contains_call (basic_block block)
320363d1a8abSmrg {
320463d1a8abSmrg   rtx_insn *insn;
320563d1a8abSmrg 
320663d1a8abSmrg   FOR_BB_INSNS (block, insn)
320763d1a8abSmrg     if (CALL_P (insn))
320863d1a8abSmrg       return true;
320963d1a8abSmrg 
321063d1a8abSmrg   return false;
321163d1a8abSmrg }
321263d1a8abSmrg 
321363d1a8abSmrg static bool
321463d1a8abSmrg mn10300_loop_contains_call_insn (loop_p loop)
321563d1a8abSmrg {
321663d1a8abSmrg   basic_block * bbs;
321763d1a8abSmrg   bool result = false;
321863d1a8abSmrg   unsigned int i;
321963d1a8abSmrg 
322063d1a8abSmrg   bbs = get_loop_body (loop);
322163d1a8abSmrg 
322263d1a8abSmrg   for (i = 0; i < loop->num_nodes; i++)
322363d1a8abSmrg     if (mn10300_block_contains_call (bbs[i]))
322463d1a8abSmrg       {
322563d1a8abSmrg 	result = true;
322663d1a8abSmrg 	break;
322763d1a8abSmrg       }
322863d1a8abSmrg 
322963d1a8abSmrg   free (bbs);
323063d1a8abSmrg   return result;
323163d1a8abSmrg }
323263d1a8abSmrg 
323363d1a8abSmrg static void
323463d1a8abSmrg mn10300_scan_for_setlb_lcc (void)
323563d1a8abSmrg {
323663d1a8abSmrg   loop_p loop;
323763d1a8abSmrg 
323863d1a8abSmrg   DUMP ("Looking for loops that can use the SETLB insn", NULL_RTX);
323963d1a8abSmrg 
324063d1a8abSmrg   df_analyze ();
324163d1a8abSmrg   compute_bb_for_insn ();
324263d1a8abSmrg 
324363d1a8abSmrg   /* Find the loops.  */
324463d1a8abSmrg   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
324563d1a8abSmrg 
324663d1a8abSmrg   /* FIXME: For now we only investigate innermost loops.  In practice however
324763d1a8abSmrg      if an inner loop is not suitable for use with the SETLB/Lcc insns, it may
324863d1a8abSmrg      be the case that its parent loop is suitable.  Thus we should check all
324963d1a8abSmrg      loops, but work from the innermost outwards.  */
325063d1a8abSmrg   FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
325163d1a8abSmrg     {
325263d1a8abSmrg       const char * reason = NULL;
325363d1a8abSmrg 
325463d1a8abSmrg       /* Check to see if we can modify this loop.  If we cannot
325563d1a8abSmrg 	 then set 'reason' to describe why it could not be done.  */
325663d1a8abSmrg       if (loop->latch == NULL)
325763d1a8abSmrg 	reason = "it contains multiple latches";
325863d1a8abSmrg       else if (loop->header != loop->latch)
325963d1a8abSmrg 	/* FIXME: We could handle loops that span multiple blocks,
326063d1a8abSmrg 	   but this requires a lot more work tracking down the branches
326163d1a8abSmrg 	   that need altering, so for now keep things simple.  */
326263d1a8abSmrg 	reason = "the loop spans multiple blocks";
326363d1a8abSmrg       else if (mn10300_loop_contains_call_insn (loop))
326463d1a8abSmrg 	reason = "it contains CALL insns";
326563d1a8abSmrg       else
326663d1a8abSmrg 	{
326763d1a8abSmrg 	  rtx_insn *branch = BB_END (loop->latch);
326863d1a8abSmrg 
326963d1a8abSmrg 	  gcc_assert (JUMP_P (branch));
327063d1a8abSmrg 	  if (single_set (branch) == NULL_RTX || ! any_condjump_p (branch))
327163d1a8abSmrg 	    /* We cannot optimize tablejumps and the like.  */
327263d1a8abSmrg 	    /* FIXME: We could handle unconditional jumps.  */
327363d1a8abSmrg 	    reason = "it is not a simple loop";
327463d1a8abSmrg 	  else
327563d1a8abSmrg 	    {
327663d1a8abSmrg 	      rtx_insn *label;
327763d1a8abSmrg 
327863d1a8abSmrg 	      if (dump_file)
327963d1a8abSmrg 		flow_loop_dump (loop, dump_file, NULL, 0);
328063d1a8abSmrg 
328163d1a8abSmrg 	      label = BB_HEAD (loop->header);
328263d1a8abSmrg 	      gcc_assert (LABEL_P (label));
328363d1a8abSmrg 
328463d1a8abSmrg 	      mn10300_insert_setlb_lcc (label, branch);
328563d1a8abSmrg 	    }
328663d1a8abSmrg 	}
328763d1a8abSmrg 
328863d1a8abSmrg       if (dump_file && reason != NULL)
328963d1a8abSmrg 	fprintf (dump_file, "Loop starting with insn %d is not suitable because %s\n",
329063d1a8abSmrg 		 INSN_UID (BB_HEAD (loop->header)),
329163d1a8abSmrg 		 reason);
329263d1a8abSmrg     }
329363d1a8abSmrg 
329463d1a8abSmrg   loop_optimizer_finalize ();
329563d1a8abSmrg 
329663d1a8abSmrg   df_finish_pass (false);
329763d1a8abSmrg 
329863d1a8abSmrg   DUMP ("SETLB scan complete", NULL_RTX);
329963d1a8abSmrg }
330063d1a8abSmrg 
330163d1a8abSmrg static void
330263d1a8abSmrg mn10300_reorg (void)
330363d1a8abSmrg {
330463d1a8abSmrg   /* These are optimizations, so only run them if optimizing.  */
330563d1a8abSmrg   if (TARGET_AM33 && (optimize > 0 || optimize_size))
330663d1a8abSmrg     {
330763d1a8abSmrg       if (TARGET_ALLOW_SETLB)
330863d1a8abSmrg 	mn10300_scan_for_setlb_lcc ();
330963d1a8abSmrg 
331063d1a8abSmrg       if (TARGET_ALLOW_LIW)
331163d1a8abSmrg 	mn10300_bundle_liw ();
331263d1a8abSmrg     }
331363d1a8abSmrg }
331463d1a8abSmrg 
331563d1a8abSmrg /* Initialize the GCC target structure.  */
331663d1a8abSmrg 
331763d1a8abSmrg #undef  TARGET_MACHINE_DEPENDENT_REORG
331863d1a8abSmrg #define TARGET_MACHINE_DEPENDENT_REORG mn10300_reorg
331963d1a8abSmrg 
332063d1a8abSmrg #undef  TARGET_ASM_ALIGNED_HI_OP
332163d1a8abSmrg #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
332263d1a8abSmrg 
332363d1a8abSmrg #undef  TARGET_LEGITIMIZE_ADDRESS
332463d1a8abSmrg #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
332563d1a8abSmrg 
332663d1a8abSmrg #undef  TARGET_ADDRESS_COST
332763d1a8abSmrg #define TARGET_ADDRESS_COST  mn10300_address_cost
332863d1a8abSmrg #undef  TARGET_REGISTER_MOVE_COST
332963d1a8abSmrg #define TARGET_REGISTER_MOVE_COST  mn10300_register_move_cost
333063d1a8abSmrg #undef  TARGET_MEMORY_MOVE_COST
333163d1a8abSmrg #define TARGET_MEMORY_MOVE_COST  mn10300_memory_move_cost
333263d1a8abSmrg #undef  TARGET_RTX_COSTS
333363d1a8abSmrg #define TARGET_RTX_COSTS mn10300_rtx_costs
333463d1a8abSmrg 
333563d1a8abSmrg #undef  TARGET_ASM_FILE_START
333663d1a8abSmrg #define TARGET_ASM_FILE_START mn10300_file_start
333763d1a8abSmrg #undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
333863d1a8abSmrg #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
333963d1a8abSmrg 
334063d1a8abSmrg #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
334163d1a8abSmrg #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra
334263d1a8abSmrg 
334363d1a8abSmrg #undef  TARGET_OPTION_OVERRIDE
334463d1a8abSmrg #define TARGET_OPTION_OVERRIDE mn10300_option_override
334563d1a8abSmrg 
334663d1a8abSmrg #undef  TARGET_ENCODE_SECTION_INFO
334763d1a8abSmrg #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
334863d1a8abSmrg 
334963d1a8abSmrg #undef  TARGET_PROMOTE_PROTOTYPES
335063d1a8abSmrg #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
335163d1a8abSmrg #undef  TARGET_RETURN_IN_MEMORY
335263d1a8abSmrg #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
335363d1a8abSmrg #undef  TARGET_PASS_BY_REFERENCE
335463d1a8abSmrg #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
335563d1a8abSmrg #undef  TARGET_CALLEE_COPIES
3356*ec02198aSmrg #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_arg_info_true
335763d1a8abSmrg #undef  TARGET_ARG_PARTIAL_BYTES
335863d1a8abSmrg #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
335963d1a8abSmrg #undef  TARGET_FUNCTION_ARG
336063d1a8abSmrg #define TARGET_FUNCTION_ARG mn10300_function_arg
336163d1a8abSmrg #undef  TARGET_FUNCTION_ARG_ADVANCE
336263d1a8abSmrg #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
336363d1a8abSmrg 
336463d1a8abSmrg #undef  TARGET_EXPAND_BUILTIN_SAVEREGS
336563d1a8abSmrg #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
336663d1a8abSmrg #undef  TARGET_EXPAND_BUILTIN_VA_START
336763d1a8abSmrg #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
336863d1a8abSmrg 
336963d1a8abSmrg #undef  TARGET_CASE_VALUES_THRESHOLD
337063d1a8abSmrg #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
337163d1a8abSmrg 
337263d1a8abSmrg #undef TARGET_LRA_P
337363d1a8abSmrg #define TARGET_LRA_P hook_bool_void_false
337463d1a8abSmrg 
337563d1a8abSmrg #undef  TARGET_LEGITIMATE_ADDRESS_P
337663d1a8abSmrg #define TARGET_LEGITIMATE_ADDRESS_P	mn10300_legitimate_address_p
337763d1a8abSmrg #undef  TARGET_DELEGITIMIZE_ADDRESS
337863d1a8abSmrg #define TARGET_DELEGITIMIZE_ADDRESS	mn10300_delegitimize_address
337963d1a8abSmrg #undef  TARGET_LEGITIMATE_CONSTANT_P
338063d1a8abSmrg #define TARGET_LEGITIMATE_CONSTANT_P	mn10300_legitimate_constant_p
338163d1a8abSmrg 
338263d1a8abSmrg #undef  TARGET_PREFERRED_RELOAD_CLASS
338363d1a8abSmrg #define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
338463d1a8abSmrg #undef  TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
338563d1a8abSmrg #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS \
338663d1a8abSmrg   mn10300_preferred_output_reload_class
338763d1a8abSmrg #undef  TARGET_SECONDARY_RELOAD
338863d1a8abSmrg #define TARGET_SECONDARY_RELOAD  mn10300_secondary_reload
338963d1a8abSmrg 
339063d1a8abSmrg #undef  TARGET_TRAMPOLINE_INIT
339163d1a8abSmrg #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
339263d1a8abSmrg 
339363d1a8abSmrg #undef  TARGET_FUNCTION_VALUE
339463d1a8abSmrg #define TARGET_FUNCTION_VALUE mn10300_function_value
339563d1a8abSmrg #undef  TARGET_LIBCALL_VALUE
339663d1a8abSmrg #define TARGET_LIBCALL_VALUE mn10300_libcall_value
339763d1a8abSmrg 
339863d1a8abSmrg #undef  TARGET_ASM_OUTPUT_MI_THUNK
339963d1a8abSmrg #define TARGET_ASM_OUTPUT_MI_THUNK      mn10300_asm_output_mi_thunk
340063d1a8abSmrg #undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
340163d1a8abSmrg #define TARGET_ASM_CAN_OUTPUT_MI_THUNK  mn10300_can_output_mi_thunk
340263d1a8abSmrg 
340363d1a8abSmrg #undef  TARGET_SCHED_ADJUST_COST
340463d1a8abSmrg #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
340563d1a8abSmrg 
340663d1a8abSmrg #undef  TARGET_CONDITIONAL_REGISTER_USAGE
340763d1a8abSmrg #define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
340863d1a8abSmrg 
340963d1a8abSmrg #undef TARGET_MD_ASM_ADJUST
341063d1a8abSmrg #define TARGET_MD_ASM_ADJUST mn10300_md_asm_adjust
341163d1a8abSmrg 
341263d1a8abSmrg #undef  TARGET_FLAGS_REGNUM
341363d1a8abSmrg #define TARGET_FLAGS_REGNUM  CC_REG
341463d1a8abSmrg 
3415c7a68eb7Smrg #undef  TARGET_HARD_REGNO_MODE_OK
3416c7a68eb7Smrg #define TARGET_HARD_REGNO_MODE_OK mn10300_hard_regno_mode_ok
3417c7a68eb7Smrg 
3418c7a68eb7Smrg #undef  TARGET_MODES_TIEABLE_P
3419c7a68eb7Smrg #define TARGET_MODES_TIEABLE_P mn10300_modes_tieable_p
3420c7a68eb7Smrg 
34210fc04c29Smrg #undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
34220fc04c29Smrg #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
34230fc04c29Smrg 
342463d1a8abSmrg struct gcc_target targetm = TARGET_INITIALIZER;
3425