1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2    Contributed by Jon Beniston <jon@beniston.com>
3 
4    Copyright (C) 2009-2016 Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published
10    by the Free Software Foundation; either version 3, or (at your
11    option) any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "tm_p.h"
31 #include "optabs.h"
32 #include "regs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "output.h"
36 #include "calls.h"
37 #include "alias.h"
38 #include "explow.h"
39 #include "expr.h"
40 #include "tm-constrs.h"
41 #include "builtins.h"
42 
43 /* This file should be included last.  */
44 #include "target-def.h"
45 
/* Layout of a function's stack frame, computed by
   lm32_compute_frame_size and cached in current_frame_info.  */
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
55 
/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       machine_mode mode,
				       const_tree type, bool named);

/* Hook the lm32 implementations into the target vector; see target.def
   for the contract of each hook.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
109 
110 /* Return non-zero if the given return type should be returned in memory.  */
111 
112 int
lm32_return_in_memory(tree type)113 lm32_return_in_memory (tree type)
114 {
115   HOST_WIDE_INT size;
116 
117   if (!AGGREGATE_TYPE_P (type))
118     {
119       /* All simple types are returned in registers.  */
120       return 0;
121     }
122 
123   size = int_size_in_bytes (type);
124   if (size >= 0 && size <= UNITS_PER_WORD)
125     {
126       /* If it can fit in one register.  */
127       return 0;
128     }
129 
130   return 1;
131 }
132 
133 /* Generate an emit a word sized add instruction.  */
134 
135 static rtx
emit_add(rtx dest,rtx src0,rtx src1)136 emit_add (rtx dest, rtx src0, rtx src1)
137 {
138   rtx insn;
139   insn = emit_insn (gen_addsi3 (dest, src0, src1));
140   return insn;
141 }
142 
/* Generate the code to compare (and possibly branch) two integer values
   TEST_CODE is the comparison code we are trying to emulate
     (or implement directly)
   RESULT is where to store the result of the comparison,
     or null to emit a branch
   CMP0 CMP1 are the two comparison operands
   DESTINATION is the destination of the branch, or null to only compare
   */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  /* Take the mode from whichever operand has one; constants may be
     VOIDmode.  */
  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or compare.  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction:
	 (set pc (if_then_else (code cmp0 cmp1) (label_ref dest) pc)).  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
         move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      /* Signed compares: the constant must satisfy the target's
		 K constraint.  */
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      /* Unsigned compares: the constant must satisfy the
		 target's L constraint.  */
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
244 
245 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
246    and OPERAND[3].  Store the result in OPERANDS[0].  */
247 
248 void
lm32_expand_scc(rtx operands[])249 lm32_expand_scc (rtx operands[])
250 {
251   rtx target = operands[0];
252   enum rtx_code code = GET_CODE (operands[1]);
253   rtx op0 = operands[2];
254   rtx op1 = operands[3];
255 
256   gen_int_relational (code, target, op0, op1, NULL_RTX);
257 }
258 
259 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
260    CODE and jump to OPERANDS[3] if the condition holds.  */
261 
262 void
lm32_expand_conditional_branch(rtx operands[])263 lm32_expand_conditional_branch (rtx operands[])
264 {
265   enum rtx_code code = GET_CODE (operands[0]);
266   rtx op0 = operands[1];
267   rtx op1 = operands[2];
268   rtx destination = operands[3];
269 
270   gen_int_relational (code, NULL_RTX, op0, op1, destination);
271 }
272 
/* Generate and emit RTL to save (OP == 0) or restore (OP != 0) the
   callee save registers recorded in INFO->reg_save_mask.  Save-side
   insns are marked frame related.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.
     Slots are assigned at decreasing offsets as regno increases.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      /* Offset fits the K constraint: address the slot directly
		 off the stack pointer.  */
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller saved so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

	  /* OP == 0 stores the register (prologue save); otherwise
	     load it back (epilogue restore).  */
	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* only prologue instructions which set the sp fp or save a
	     register should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
327 
/* Adjust the stack pointer by AMOUNT bytes; a negative AMOUNT grows the
   frame.  Frame-growing adjustments (AMOUNT < 0) are marked frame
   related.  */
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  /* NOTE(review): this range looks odd — a 16-bit signed immediate
     would be [-32768, 32767].  Confirm against the add-immediate range
     in lm32.md before changing.  */
  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      /* Amount fits in the add immediate; adjust sp directly.  */
      insn = emit_add (stack_pointer_rtx,
		       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
}
353 
354 
/* Create and emit instructions for a function's prologue: allocate the
   frame, save callee-saved registers, and set up the frame pointer.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Setup frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - Don't use total_size, as that includes pretend_size,
             which isn't part of this frame?  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
393 
394 /* Create an emit instructions for a functions epilogue.  */
395 void
lm32_expand_epilogue(void)396 lm32_expand_epilogue (void)
397 {
398   rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
399 
400   lm32_compute_frame_size (get_frame_size ());
401 
402   if (current_frame_info.total_size > 0)
403     {
404       /* Prevent stack code from being reordered.  */
405       emit_insn (gen_blockage ());
406 
407       /* Restore callee save registers.  */
408       if (current_frame_info.reg_save_mask != 0)
409 	expand_save_restore (&current_frame_info, 1);
410 
411       /* Deallocate stack.  */
412       stack_adjust (current_frame_info.total_size);
413 
414       /* Return to calling function.  */
415       emit_jump_insn (gen_return_internal (ra_rtx));
416     }
417   else
418     {
419       /* Return to calling function.  */
420       emit_jump_insn (gen_return_internal (ra_rtx));
421     }
422 }
423 
/* Compute the stack frame layout for the current function, cache it in
   current_frame_info, and return the total frame size in bytes.  SIZE
   is the size of the function's local variables (get_frame_size).  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  /* The return address must be saved if it is live, or if the function
     is a non-leaf or compiled without optimization.  */
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  /* Save the frame pointer when needed, unless the loop above already
     included it.  */
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary (4 bytes).  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
477 
/* Print operand OP of an instruction to FILE.  LETTER is the operand
   modifier; the only one handled here is 'z', which prints register r0
   for a zero integer constant.  Comparison codes print as the
   condition suffixes used in the assembly mnemonics.  */
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    /* Print the operand being extended.  */
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (GET_MODE (op), XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* The only floating-point immediate that can be emitted is 0.0.  */
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e  ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g  ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l  ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
534 
535 /* A C compound statement to output to stdio stream STREAM the
536    assembler syntax for an instruction operand that is a memory
537    reference whose address is ADDR.  ADDR is an RTL expression.
538 
539    On some machines, the syntax for a symbolic address depends on
540    the section that the address refers to.  On these machines,
541    define the macro `ENCODE_SECTION_INFO' to store the information
542    into the `symbol_ref', and then check for it here.  */
543 
544 void
lm32_print_operand_address(FILE * file,rtx addr)545 lm32_print_operand_address (FILE * file, rtx addr)
546 {
547   switch (GET_CODE (addr))
548     {
549     case REG:
550       fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
551       break;
552 
553     case MEM:
554       output_address (VOIDmode, XEXP (addr, 0));
555       break;
556 
557     case PLUS:
558       {
559 	rtx arg0 = XEXP (addr, 0);
560 	rtx arg1 = XEXP (addr, 1);
561 
562 	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
563 	  {
564 	    if (GET_CODE (arg1) == CONST_INT)
565 	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
566 		       INTVAL (arg1));
567 	    else
568 	      {
569 		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
570 		output_addr_const (file, arg1);
571 		fprintf (file, ")");
572 	      }
573 	  }
574 	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
575 	  output_addr_const (file, addr);
576 	else
577 	  fatal_insn ("bad operand", addr);
578       }
579       break;
580 
581     case SYMBOL_REF:
582       if (SYMBOL_REF_SMALL_P (addr))
583 	{
584 	  fprintf (file, "gp(");
585 	  output_addr_const (file, addr);
586 	  fprintf (file, ")");
587 	}
588       else
589 	fatal_insn ("can't use non gp relative absolute address", addr);
590       break;
591 
592     default:
593       fatal_insn ("invalid addressing mode", addr);
594       break;
595     }
596 }
597 
598 /* Determine where to put an argument to a function.
599    Value is zero to push the argument on the stack,
600    or a hard register in which to store the argument.
601 
602    MODE is the argument's machine mode.
603    TYPE is the data type of the argument (as a tree).
604     This is null for libcalls where that information may
605     not be available.
606    CUM is a variable of type CUMULATIVE_ARGS which gives info about
607     the preceding args and about the function being called.
608    NAMED is nonzero if this argument is a named parameter
609     (otherwise it is an extra parameter matching an ellipsis).  */
610 
611 static rtx
lm32_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named)612 lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
613 		   const_tree type, bool named)
614 {
615   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
616 
617   if (mode == VOIDmode)
618     /* Compute operand 2 of the call insn.  */
619     return GEN_INT (0);
620 
621   if (targetm.calls.must_pass_in_stack (mode, type))
622     return NULL_RTX;
623 
624   if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
625     return NULL_RTX;
626 
627   return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
628 }
629 
630 static void
lm32_function_arg_advance(cumulative_args_t cum,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)631 lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
632 			   const_tree type, bool named ATTRIBUTE_UNUSED)
633 {
634   *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
635 }
636 
637 HOST_WIDE_INT
lm32_compute_initial_elimination_offset(int from,int to)638 lm32_compute_initial_elimination_offset (int from, int to)
639 {
640   HOST_WIDE_INT offset = 0;
641 
642   switch (from)
643     {
644     case ARG_POINTER_REGNUM:
645       switch (to)
646 	{
647 	case FRAME_POINTER_REGNUM:
648 	  offset = 0;
649 	  break;
650 	case STACK_POINTER_REGNUM:
651 	  offset =
652 	    lm32_compute_frame_size (get_frame_size ()) -
653 	    current_frame_info.pretend_size;
654 	  break;
655 	default:
656 	  gcc_unreachable ();
657 	}
658       break;
659     default:
660       gcc_unreachable ();
661     }
662 
663   return offset;
664 }
665 
/* The TARGET_SETUP_INCOMING_VARARGS worker: spill any remaining
   anonymous argument registers of a varargs function to the stack and
   record the number of bytes spilled in *PRETEND_SIZE.  When NO_RTL is
   nonzero only the bookkeeping is wanted, so no insns are emitted.  */
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* this is the common case, we have been passed details setup
         for the last named argument, we want to skip over the
         registers, if any used in passing this named parameter in
         order to determine which is the first registers used to pass
         anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      /* Number of remaining argument registers to spill.  */
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
711 
712 /* Override command line options.  */
713 static void
lm32_option_override(void)714 lm32_option_override (void)
715 {
716   /* We must have sign-extend enabled if barrel-shift isn't.  */
717   if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
718     target_flags |= MASK_SIGN_EXTEND_ENABLED;
719 }
720 
721 /* Return nonzero if this function is known to have a null epilogue.
722    This allows the optimizer to omit jumps to jumps if no stack
723    was created.  */
724 int
lm32_can_use_return(void)725 lm32_can_use_return (void)
726 {
727   if (!reload_completed)
728     return 0;
729 
730   if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
731     return 0;
732 
733   if (lm32_compute_frame_size (get_frame_size ()) != 0)
734     return 0;
735 
736   return 1;
737 }
738 
739 /* Support function to determine the return address of the function
740    'count' frames back up the stack.  */
741 rtx
lm32_return_addr_rtx(int count,rtx frame)742 lm32_return_addr_rtx (int count, rtx frame)
743 {
744   rtx r;
745   if (count == 0)
746     {
747       if (!df_regs_ever_live_p (RA_REGNUM))
748 	r = gen_rtx_REG (Pmode, RA_REGNUM);
749       else
750 	{
751 	  r = gen_rtx_MEM (Pmode,
752 			   gen_rtx_PLUS (Pmode, frame,
753 					 GEN_INT (-2 * UNITS_PER_WORD)));
754 	  set_mem_alias_set (r, get_frame_alias_set ());
755 	}
756     }
757   else if (flag_omit_frame_pointer)
758     r = NULL_RTX;
759   else
760     {
761       r = gen_rtx_MEM (Pmode,
762 		       gen_rtx_PLUS (Pmode, frame,
763 				     GEN_INT (-2 * UNITS_PER_WORD)));
764       set_mem_alias_set (r, get_frame_alias_set ());
765     }
766   return r;
767 }
768 
769 /* Return true if EXP should be placed in the small data section.  */
770 
771 static bool
lm32_in_small_data_p(const_tree exp)772 lm32_in_small_data_p (const_tree exp)
773 {
774   /* We want to merge strings, so we never consider them small data.  */
775   if (TREE_CODE (exp) == STRING_CST)
776     return false;
777 
778   /* Functions are never in the small data area.  Duh.  */
779   if (TREE_CODE (exp) == FUNCTION_DECL)
780     return false;
781 
782   if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
783     {
784       const char *section = DECL_SECTION_NAME (exp);
785       if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
786 	return true;
787     }
788   else
789     {
790       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
791 
792       /* If this is an incomplete type with size 0, then we can't put it
793          in sdata because it might be too big when completed.  */
794       if (size > 0 && size <= g_switch_value)
795 	return true;
796     }
797 
798   return false;
799 }
800 
/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  ALIGNMENT (in bytes) selects
   the chunk size used for the copy.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  /* Integer mode matching the chunk width, and the chunk size in
     bytes.  */
  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  All loads are emitted
     before any store.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
854 
855 /* Expand string/block move operations.
856 
857    operands[0] is the pointer to the destination.
858    operands[1] is the pointer to the source.
859    operands[2] is the number of bytes to move.
860    operands[3] is the alignment.  */
861 
862 int
lm32_expand_block_move(rtx * operands)863 lm32_expand_block_move (rtx * operands)
864 {
865   if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
866     {
867       lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
868 			      INTVAL (operands[3]));
869       return 1;
870     }
871   return 0;
872 }
873 
874 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
875    isn't protected by a PIC unspec.  */
876 int
nonpic_symbol_mentioned_p(rtx x)877 nonpic_symbol_mentioned_p (rtx x)
878 {
879   const char *fmt;
880   int i;
881 
882   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
883       || GET_CODE (x) == PC)
884     return 1;
885 
886   /* We don't want to look into the possible MEM location of a
887      CONST_DOUBLE, since we're not going to use it, in general.  */
888   if (GET_CODE (x) == CONST_DOUBLE)
889     return 0;
890 
891   if (GET_CODE (x) == UNSPEC)
892     return 0;
893 
894   fmt = GET_RTX_FORMAT (GET_CODE (x));
895   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
896     {
897       if (fmt[i] == 'E')
898 	{
899 	  int j;
900 
901 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
902 	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
903 	      return 1;
904 	}
905       else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
906 	return 1;
907     }
908 
909   return 0;
910 }
911 
912 /* Compute a (partial) cost for rtx X.  Return true if the complete
913    cost has been computed, and false if subexpressions should be
914    scanned.  In either case, *TOTAL contains the cost result.  */
915 
916 static bool
lm32_rtx_costs(rtx x,machine_mode mode,int outer_code,int opno ATTRIBUTE_UNUSED,int * total,bool speed)917 lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
918 		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
919 {
920   int code = GET_CODE (x);
921   bool small_mode;
922 
923   const int arithmetic_latency = 1;
924   const int shift_latency = 1;
925   const int compare_latency = 2;
926   const int multiply_latency = 3;
927   const int load_latency = 3;
928   const int libcall_size_cost = 5;
929 
930   /* Determine if we can handle the given mode size in a single instruction.  */
931   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
932 
933   switch (code)
934     {
935 
936     case PLUS:
937     case MINUS:
938     case AND:
939     case IOR:
940     case XOR:
941     case NOT:
942     case NEG:
943       if (!speed)
944 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
945       else
946 	*total =
947 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
948       break;
949 
950     case COMPARE:
951       if (small_mode)
952 	{
953 	  if (!speed)
954 	    *total = COSTS_N_INSNS (1);
955 	  else
956 	    *total = COSTS_N_INSNS (compare_latency);
957 	}
958       else
959 	{
960 	  /* FIXME. Guessing here.  */
961 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
962 	}
963       break;
964 
965     case ASHIFT:
966     case ASHIFTRT:
967     case LSHIFTRT:
968       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
969 	{
970 	  if (!speed)
971 	    *total = COSTS_N_INSNS (1);
972 	  else
973 	    *total = COSTS_N_INSNS (shift_latency);
974 	}
975       else if (TARGET_BARREL_SHIFT_ENABLED)
976 	{
977 	  /* FIXME: Guessing here.  */
978 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
979 	}
980       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
981 	{
982 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
983 	}
984       else
985 	{
986 	  /* Libcall.  */
987 	  if (!speed)
988 	    *total = COSTS_N_INSNS (libcall_size_cost);
989 	  else
990 	    *total = COSTS_N_INSNS (100);
991 	}
992       break;
993 
994     case MULT:
995       if (TARGET_MULTIPLY_ENABLED && small_mode)
996 	{
997 	  if (!speed)
998 	    *total = COSTS_N_INSNS (1);
999 	  else
1000 	    *total = COSTS_N_INSNS (multiply_latency);
1001 	}
1002       else
1003 	{
1004 	  /* Libcall.  */
1005 	  if (!speed)
1006 	    *total = COSTS_N_INSNS (libcall_size_cost);
1007 	  else
1008 	    *total = COSTS_N_INSNS (100);
1009 	}
1010       break;
1011 
1012     case DIV:
1013     case MOD:
1014     case UDIV:
1015     case UMOD:
1016       if (TARGET_DIVIDE_ENABLED && small_mode)
1017 	{
1018 	  if (!speed)
1019 	    *total = COSTS_N_INSNS (1);
1020 	  else
1021 	    {
1022 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1023 		{
1024 		  int cycles = 0;
1025 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1026 
1027 		  while (i)
1028 		    {
1029 		      i >>= 2;
1030 		      cycles++;
1031 		    }
1032 		  if (IN_RANGE (i, 0, 65536))
1033 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1034 		  else
1035 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1036 		  return true;
1037 		}
1038 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1039 		{
1040 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1041 		  return true;
1042 		}
1043 	      else
1044 		{
1045 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1046 		  return false;
1047 		}
1048 	    }
1049 	}
1050       else
1051 	{
1052 	  /* Libcall.  */
1053 	  if (!speed)
1054 	    *total = COSTS_N_INSNS (libcall_size_cost);
1055 	  else
1056 	    *total = COSTS_N_INSNS (100);
1057 	}
1058       break;
1059 
1060     case HIGH:
1061     case LO_SUM:
1062       if (!speed)
1063 	*total = COSTS_N_INSNS (1);
1064       else
1065 	*total = COSTS_N_INSNS (arithmetic_latency);
1066       break;
1067 
1068     case ZERO_EXTEND:
1069       if (MEM_P (XEXP (x, 0)))
1070 	*total = COSTS_N_INSNS (0);
1071       else if (small_mode)
1072 	{
1073 	  if (!speed)
1074 	    *total = COSTS_N_INSNS (1);
1075 	  else
1076 	    *total = COSTS_N_INSNS (arithmetic_latency);
1077 	}
1078       else
1079 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1080       break;
1081 
1082     case CONST_INT:
1083       {
1084 	switch (outer_code)
1085 	  {
1086 	  case HIGH:
1087 	  case LO_SUM:
1088 	    *total = COSTS_N_INSNS (0);
1089 	    return true;
1090 
1091 	  case AND:
1092 	  case XOR:
1093 	  case IOR:
1094 	  case ASHIFT:
1095 	  case ASHIFTRT:
1096 	  case LSHIFTRT:
1097 	  case ROTATE:
1098 	  case ROTATERT:
1099 	    if (satisfies_constraint_L (x))
1100 	      *total = COSTS_N_INSNS (0);
1101 	    else
1102 	      *total = COSTS_N_INSNS (2);
1103 	    return true;
1104 
1105 	  case SET:
1106 	  case PLUS:
1107 	  case MINUS:
1108 	  case COMPARE:
1109 	    if (satisfies_constraint_K (x))
1110 	      *total = COSTS_N_INSNS (0);
1111 	    else
1112 	      *total = COSTS_N_INSNS (2);
1113 	    return true;
1114 
1115 	  case MULT:
1116 	    if (TARGET_MULTIPLY_ENABLED)
1117 	      {
1118 	        if (satisfies_constraint_K (x))
1119 	         *total = COSTS_N_INSNS (0);
1120 	        else
1121 	          *total = COSTS_N_INSNS (2);
1122 		return true;
1123 	      }
1124 	    /* Fall through.  */
1125 
1126 	  default:
1127             if (satisfies_constraint_K (x))
1128 	      *total = COSTS_N_INSNS (1);
1129 	    else
1130 	      *total = COSTS_N_INSNS (2);
1131 	    return true;
1132 	  }
1133       }
1134 
1135     case SYMBOL_REF:
1136     case CONST:
1137       switch (outer_code)
1138 	{
1139 	case HIGH:
1140 	case LO_SUM:
1141 	  *total = COSTS_N_INSNS (0);
1142 	  return true;
1143 
1144 	case MEM:
1145 	case SET:
1146 	  if (g_switch_value)
1147 	    {
1148 	      *total = COSTS_N_INSNS (0);
1149 	      return true;
1150 	    }
1151 	  break;
1152 	}
1153       /* Fall through.  */
1154 
1155     case LABEL_REF:
1156     case CONST_DOUBLE:
1157       *total = COSTS_N_INSNS (2);
1158       return true;
1159 
1160     case SET:
1161       *total = COSTS_N_INSNS (1);
1162       break;
1163 
1164     case MEM:
1165       if (!speed)
1166 	*total = COSTS_N_INSNS (1);
1167       else
1168 	*total = COSTS_N_INSNS (load_latency);
1169       break;
1170 
1171     }
1172 
1173   return false;
1174 }
1175 
/* Implement TARGET_CAN_ELIMINATE.  */
1177 
1178 bool
lm32_can_eliminate(const int from ATTRIBUTE_UNUSED,const int to)1179 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1180 {
1181   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1182 }
1183 
1184 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1185 
1186 static bool
lm32_legitimate_address_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x,bool strict)1187 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1188 {
1189    /* (rM) */
1190   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1191     return true;
1192   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1193     return true;
1194 
1195   /* (rM)+literal) */
1196   if (GET_CODE (x) == PLUS
1197      && REG_P (XEXP (x, 0))
1198      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1199          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1200      && GET_CODE (XEXP (x, 1)) == CONST_INT
1201      && satisfies_constraint_K (XEXP ((x), 1)))
1202     return true;
1203 
1204   /* gp(sym)  */
1205   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1206     return true;
1207 
1208   return false;
1209 }
1210 
1211 /* Check a move is not memory to memory.  */
1212 
1213 bool
lm32_move_ok(machine_mode mode,rtx operands[2])1214 lm32_move_ok (machine_mode mode, rtx operands[2]) {
1215   if (memory_operand (operands[0], mode))
1216     return register_or_zero_operand (operands[1], mode);
1217   return true;
1218 }
1219