1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2    Contributed by Jon Beniston <jon@beniston.com>
3 
4    Copyright (C) 2009-2021 Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published
10    by the Free Software Foundation; either version 3, or (at your
11    option) any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #define IN_TARGET_CODE 1
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "attribs.h"
33 #include "df.h"
34 #include "memmodel.h"
35 #include "tm_p.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "calls.h"
42 #include "alias.h"
43 #include "explow.h"
44 #include "expr.h"
45 #include "tm-constrs.h"
46 #include "builtins.h"
47 
48 /* This file should be included last.  */
49 #include "target-def.h"
50 
51 struct lm32_frame_info
52 {
53   HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
54   HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
55   HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
56   HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
57   HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
58   unsigned int reg_save_mask;	/* mask of saved registers.  */
59 };
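
/* Illustrative sketch (not normative) of the frame layout implied by the
   fields above, lm32_compute_frame_size and lm32_expand_prologue:

	high addresses
	+--------------------------+
	| pretend args             |  pretend_size
	+--------------------------+  <-- frame pointer, when one is used
	| local variables          |  locals_size
	+--------------------------+
	| callee-saved registers   |  callee_size
	+--------------------------+
	| outgoing arguments       |  args_size
	+--------------------------+  <-- stack pointer after the prologue
	low addresses

   total_size is the sum of the four sizes, rounded up to a multiple of
   four bytes, and the frame pointer (when needed) is set to
   sp + args_size + callee_size + locals_size.  */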
60 
61 /* Prototypes for static functions.  */
62 static rtx emit_add (rtx dest, rtx src0, rtx src1);
63 static void expand_save_restore (struct lm32_frame_info *info, int op);
64 static void stack_adjust (HOST_WIDE_INT amount);
65 static bool lm32_in_small_data_p (const_tree);
66 static void lm32_setup_incoming_varargs (cumulative_args_t cum,
67 					 const function_arg_info &,
68 					 int *pretend_size, int no_rtl);
69 static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
70 			    int *total, bool speed);
71 static bool lm32_can_eliminate (const int, const int);
72 static bool
73 lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
74 static HOST_WIDE_INT lm32_compute_frame_size (int size);
75 static void lm32_option_override (void);
76 static rtx lm32_function_arg (cumulative_args_t, const function_arg_info &);
77 static void lm32_function_arg_advance (cumulative_args_t cum,
78 				       const function_arg_info &);
79 static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode);
80 static bool lm32_modes_tieable_p (machine_mode, machine_mode);
81 static HOST_WIDE_INT lm32_starting_frame_offset (void);
82 
83 #undef TARGET_OPTION_OVERRIDE
84 #define TARGET_OPTION_OVERRIDE lm32_option_override
85 #undef TARGET_ADDRESS_COST
86 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
87 #undef TARGET_RTX_COSTS
88 #define TARGET_RTX_COSTS lm32_rtx_costs
89 #undef TARGET_IN_SMALL_DATA_P
90 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
91 #undef TARGET_PROMOTE_FUNCTION_MODE
92 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
93 #undef TARGET_SETUP_INCOMING_VARARGS
94 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
95 #undef TARGET_FUNCTION_ARG
96 #define TARGET_FUNCTION_ARG lm32_function_arg
97 #undef TARGET_FUNCTION_ARG_ADVANCE
98 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
99 #undef TARGET_PROMOTE_PROTOTYPES
100 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
101 #undef TARGET_MIN_ANCHOR_OFFSET
102 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
103 #undef TARGET_MAX_ANCHOR_OFFSET
104 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
105 #undef TARGET_CAN_ELIMINATE
106 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
107 #undef TARGET_LRA_P
108 #define TARGET_LRA_P hook_bool_void_false
109 #undef TARGET_LEGITIMATE_ADDRESS_P
110 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
111 #undef TARGET_HARD_REGNO_MODE_OK
112 #define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
113 #undef TARGET_MODES_TIEABLE_P
114 #define TARGET_MODES_TIEABLE_P lm32_modes_tieable_p
115 
116 #undef TARGET_CONSTANT_ALIGNMENT
117 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
118 
119 #undef TARGET_STARTING_FRAME_OFFSET
120 #define TARGET_STARTING_FRAME_OFFSET lm32_starting_frame_offset
121 
122 struct gcc_target targetm = TARGET_INITIALIZER;
123 
124 /* Current frame information calculated by lm32_compute_frame_size.  */
125 static struct lm32_frame_info current_frame_info;
126 
127 /* Return non-zero if the given return type should be returned in memory.  */
128 
129 int
lm32_return_in_memory (tree type)
131 {
132   HOST_WIDE_INT size;
133 
134   if (!AGGREGATE_TYPE_P (type))
135     {
136       /* All simple types are returned in registers.  */
137       return 0;
138     }
139 
140   size = int_size_in_bytes (type);
141   if (size >= 0 && size <= UNITS_PER_WORD)
142     {
143       /* If it can fit in one register.  */
144       return 0;
145     }
146 
147   return 1;
148 }
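
/* Example (illustrative): with UNITS_PER_WORD == 4 on this target, a
   struct of up to four bytes is returned in a register, while larger
   or variable-sized aggregates are returned in memory.  */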
149 
/* Generate and emit a word-sized add instruction.  */
151 
152 static rtx
emit_add (rtx dest, rtx src0, rtx src1)
154 {
155   rtx insn;
156   insn = emit_insn (gen_addsi3 (dest, src0, src1));
157   return insn;
158 }
159 
/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only
   compare.  */
168 
169 static void
gen_int_relational (enum rtx_code code,
                    rtx result,
                    rtx cmp0,
                    rtx cmp1,
                    rtx destination)
175 {
176   machine_mode mode;
177   int branch_p;
178 
179   mode = GET_MODE (cmp0);
180   if (mode == VOIDmode)
181     mode = GET_MODE (cmp1);
182 
  /* Is this a branch or a compare?  */
184   branch_p = (destination != 0);
185 
186   /* Instruction set doesn't support LE or LT, so swap operands and use
187      GE, GT.  */
188   switch (code)
189     {
190     case LE:
191     case LT:
192     case LEU:
193     case LTU:
194       {
195 	rtx temp;
196 
197 	code = swap_condition (code);
198 	temp = cmp0;
199 	cmp0 = cmp1;
200 	cmp1 = temp;
201 	break;
202       }
203     default:
204       break;
205     }
206 
207   if (branch_p)
208     {
209       rtx insn, cond, label;
210 
211       /* Operands must be in registers.  */
212       if (!register_operand (cmp0, mode))
213 	cmp0 = force_reg (mode, cmp0);
214       if (!register_operand (cmp1, mode))
215 	cmp1 = force_reg (mode, cmp1);
216 
217       /* Generate conditional branch instruction.  */
218       cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
219       label = gen_rtx_LABEL_REF (VOIDmode, destination);
220       insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
221 							cond, label, pc_rtx));
222       emit_jump_insn (insn);
223     }
224   else
225     {
226       /* We can't have const_ints in cmp0, other than 0.  */
227       if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
228 	cmp0 = force_reg (mode, cmp0);
229 
      /* If the comparison is against an integer that is not in the
         legal range, move it into a register.  */
232       if (GET_CODE (cmp1) == CONST_INT)
233 	{
234 	  switch (code)
235 	    {
236 	    case EQ:
237 	    case NE:
238 	    case LE:
239 	    case LT:
240 	    case GE:
241 	    case GT:
242 	      if (!satisfies_constraint_K (cmp1))
243 		cmp1 = force_reg (mode, cmp1);
244 	      break;
245 	    case LEU:
246 	    case LTU:
247 	    case GEU:
248 	    case GTU:
249 	      if (!satisfies_constraint_L (cmp1))
250 		cmp1 = force_reg (mode, cmp1);
251 	      break;
252 	    default:
253 	      gcc_unreachable ();
254 	    }
255 	}
256 
257       /* Generate compare instruction.  */
258       emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
259     }
260 }
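
/* Worked example (illustrative): for the comparison (lt reg_a reg_b)
   the switch above rewrites the code to GT and swaps the operands, so
   the branch form emitted is

     (set (pc) (if_then_else (gt reg_b reg_a) (label_ref L) (pc)))

   and the compare form stores (gt reg_b reg_a) directly into RESULT.  */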
261 
/* Try performing the comparison in OPERANDS[1], whose operands are
   OPERANDS[2] and OPERANDS[3].  Store the result in OPERANDS[0].  */
264 
265 void
lm32_expand_scc (rtx operands[])
267 {
268   rtx target = operands[0];
269   enum rtx_code code = GET_CODE (operands[1]);
270   rtx op0 = operands[2];
271   rtx op1 = operands[3];
272 
273   gen_int_relational (code, target, op0, op1, NULL_RTX);
274 }
275 
/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */
278 
279 void
lm32_expand_conditional_branch (rtx operands[])
281 {
282   enum rtx_code code = GET_CODE (operands[0]);
283   rtx op0 = operands[1];
284   rtx op1 = operands[2];
285   rtx destination = operands[3];
286 
287   gen_int_relational (code, NULL_RTX, op0, op1, destination);
288 }
289 
290 /* Generate and emit RTL to save or restore callee save registers.  */
291 static void
expand_save_restore (struct lm32_frame_info *info, int op)
293 {
294   unsigned int reg_save_mask = info->reg_save_mask;
295   int regno;
296   HOST_WIDE_INT offset;
297   rtx insn;
298 
299   /* Callee saves are below locals and above outgoing arguments.  */
300   offset = info->args_size + info->callee_size;
301   for (regno = 0; regno <= 31; regno++)
302     {
303       if ((reg_save_mask & (1 << regno)) != 0)
304 	{
305 	  rtx offset_rtx;
306 	  rtx mem;
307 
308 	  offset_rtx = GEN_INT (offset);
309 	  if (satisfies_constraint_K (offset_rtx))
310 	    {
311               mem = gen_rtx_MEM (word_mode,
312                                  gen_rtx_PLUS (Pmode,
313                                                stack_pointer_rtx,
314                                                offset_rtx));
315             }
316           else
317             {
318               /* r10 is caller saved so it can be used as a temp reg.  */
319               rtx r10;
320 
321               r10 = gen_rtx_REG (word_mode, 10);
322               insn = emit_move_insn (r10, offset_rtx);
323               if (op == 0)
324                 RTX_FRAME_RELATED_P (insn) = 1;
325               insn = emit_add (r10, r10, stack_pointer_rtx);
326               if (op == 0)
327                 RTX_FRAME_RELATED_P (insn) = 1;
328               mem = gen_rtx_MEM (word_mode, r10);
329             }
330 
331 	  if (op == 0)
332 	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
333 	  else
334 	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
335 
	  /* Only prologue instructions which set the sp or fp, or save
	     a register, should be marked as frame related.  */
338 	  if (op == 0)
339 	    RTX_FRAME_RELATED_P (insn) = 1;
340 	  offset -= UNITS_PER_WORD;
341 	}
342     }
343 }
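
/* Example (illustrative): with args_size == 8 and two registers in
   reg_save_mask (callee_size == 8), the stores go to sp + 16 and then
   sp + 12; that is, offsets start at args_size + callee_size and step
   down by UNITS_PER_WORD in increasing register-number order.  */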
344 
345 static void
stack_adjust (HOST_WIDE_INT amount)
347 {
348   rtx insn;
349 
350   if (!IN_RANGE (amount, -32776, 32768))
351     {
352       /* r10 is caller saved so it can be used as a temp reg.  */
353       rtx r10;
354       r10 = gen_rtx_REG (word_mode, 10);
355       insn = emit_move_insn (r10, GEN_INT (amount));
356       if (amount < 0)
357 	RTX_FRAME_RELATED_P (insn) = 1;
358       insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
359       if (amount < 0)
360 	RTX_FRAME_RELATED_P (insn) = 1;
361     }
362   else
363     {
364       insn = emit_add (stack_pointer_rtx,
365 		       stack_pointer_rtx, GEN_INT (amount));
366       if (amount < 0)
367 	RTX_FRAME_RELATED_P (insn) = 1;
368     }
369 }
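
/* Informal note: stack_adjust emits a single sp-relative add when
   AMOUNT fits the immediate range tested above; otherwise it first
   loads AMOUNT into r10 (caller saved) and then adds r10 to sp.  For
   example, stack_adjust (-24) is one add, while stack_adjust (-100000)
   goes through r10.  */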
370 
371 
/* Create and emit instructions for a function's prologue.  */
373 void
lm32_expand_prologue (void)
375 {
376   rtx insn;
377 
378   lm32_compute_frame_size (get_frame_size ());
379 
380   if (current_frame_info.total_size > 0)
381     {
      /* Allocate space on the stack for the new frame.  */
383       stack_adjust (-current_frame_info.total_size);
384 
385       /* Save callee save registers.  */
386       if (current_frame_info.reg_save_mask != 0)
387 	expand_save_restore (&current_frame_info, 0);
388 
      /* Set up the frame pointer if it is needed.  */
390       if (frame_pointer_needed == 1)
391 	{
392 	  /* Move sp to fp.  */
393 	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
394 	  RTX_FRAME_RELATED_P (insn) = 1;
395 
	  /* Add the offset.  Don't use total_size, as that includes
	     pretend_size, which isn't part of this frame.  */
398 	  insn = emit_add (frame_pointer_rtx,
399 			   frame_pointer_rtx,
400 			   GEN_INT (current_frame_info.args_size +
401 				    current_frame_info.callee_size +
402 				    current_frame_info.locals_size));
403 	  RTX_FRAME_RELATED_P (insn) = 1;
404 	}
405 
406       /* Prevent prologue from being scheduled into function body.  */
407       emit_insn (gen_blockage ());
408     }
409 }
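
/* Informal summary of the prologue emitted above when a frame is
   needed:
     1. decrement sp by total_size (stack_adjust);
     2. store every register in reg_save_mask relative to the new sp
	(expand_save_restore with op == 0);
     3. if a frame pointer is needed, copy sp into fp and add
	args_size + callee_size + locals_size;
     4. emit a blockage so the scheduler cannot move body instructions
	into the prologue.  */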
410 
/* Create and emit instructions for a function's epilogue.  */
412 void
lm32_expand_epilogue (void)
414 {
415   rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
416 
417   lm32_compute_frame_size (get_frame_size ());
418 
419   if (current_frame_info.total_size > 0)
420     {
421       /* Prevent stack code from being reordered.  */
422       emit_insn (gen_blockage ());
423 
424       /* Restore callee save registers.  */
425       if (current_frame_info.reg_save_mask != 0)
426 	expand_save_restore (&current_frame_info, 1);
427 
428       /* Deallocate stack.  */
429       stack_adjust (current_frame_info.total_size);
430 
431       /* Return to calling function.  */
432       emit_jump_insn (gen_return_internal (ra_rtx));
433     }
434   else
435     {
436       /* Return to calling function.  */
437       emit_jump_insn (gen_return_internal (ra_rtx));
438     }
439 }
440 
/* Compute the size of the current function's frame, record the result
   in current_frame_info, and return the total frame size in bytes.  */
443 static HOST_WIDE_INT
lm32_compute_frame_size (int size)
445 {
446   int regno;
447   HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
448   unsigned int reg_save_mask;
449 
450   locals_size = size;
451   args_size = crtl->outgoing_args_size;
452   pretend_size = crtl->args.pretend_args_size;
453   callee_size = 0;
454   reg_save_mask = 0;
455 
  /* Build the mask that determines which registers we actually save
     and calculate the space required to store them on the stack.  */
458   for (regno = 1; regno < SP_REGNUM; regno++)
459     {
460       if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
461 	{
462 	  reg_save_mask |= 1 << regno;
463 	  callee_size += UNITS_PER_WORD;
464 	}
465     }
466   if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
467       || !optimize)
468     {
469       reg_save_mask |= 1 << RA_REGNUM;
470       callee_size += UNITS_PER_WORD;
471     }
472   if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
473     {
474       reg_save_mask |= 1 << FP_REGNUM;
475       callee_size += UNITS_PER_WORD;
476     }
477 
478   /* Compute total frame size.  */
479   total_size = pretend_size + args_size + locals_size + callee_size;
480 
481   /* Align frame to appropriate boundary.  */
482   total_size = (total_size + 3) & ~3;
483 
484   /* Save computed information.  */
485   current_frame_info.total_size = total_size;
486   current_frame_info.callee_size = callee_size;
487   current_frame_info.pretend_size = pretend_size;
488   current_frame_info.locals_size = locals_size;
489   current_frame_info.args_size = args_size;
490   current_frame_info.reg_save_mask = reg_save_mask;
491 
492   return total_size;
493 }
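
/* Worked example (illustrative): a non-leaf function with 12 bytes of
   locals, 8 bytes of outgoing arguments, no pretend args, and ra and fp
   saved (callee_size == 8) gets total_size == 12 + 8 + 0 + 8 == 28,
   already a multiple of four; a sum of 30 would be rounded up to 32.  */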
494 
495 void
lm32_print_operand (FILE * file, rtx op, int letter)
497 {
498   enum rtx_code code;
499 
500   code = GET_CODE (op);
501 
502   if (code == SIGN_EXTEND)
503     op = XEXP (op, 0), code = GET_CODE (op);
504   else if (code == REG || code == SUBREG)
505     {
506       int regnum;
507 
508       if (code == REG)
509 	regnum = REGNO (op);
510       else
511 	regnum = true_regnum (op);
512 
513       fprintf (file, "%s", reg_names[regnum]);
514     }
515   else if (code == HIGH)
516     output_addr_const (file, XEXP (op, 0));
517   else if (code == MEM)
518     output_address (GET_MODE (op), XEXP (op, 0));
519   else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
520     fprintf (file, "%s", reg_names[0]);
521   else if (GET_CODE (op) == CONST_DOUBLE)
522     {
523       if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
524 	output_operand_lossage ("only 0.0 can be loaded as an immediate");
525       else
526 	fprintf (file, "0");
527     }
528   else if (code == EQ)
529     fprintf (file, "e  ");
530   else if (code == NE)
531     fprintf (file, "ne ");
532   else if (code == GT)
533     fprintf (file, "g  ");
534   else if (code == GTU)
535     fprintf (file, "gu ");
536   else if (code == LT)
537     fprintf (file, "l  ");
538   else if (code == LTU)
539     fprintf (file, "lu ");
540   else if (code == GE)
541     fprintf (file, "ge ");
542   else if (code == GEU)
543     fprintf (file, "geu");
544   else if (code == LE)
545     fprintf (file, "le ");
546   else if (code == LEU)
547     fprintf (file, "leu");
548   else
549     output_addr_const (file, op);
550 }
551 
/* Output to stdio stream FILE the assembler syntax for an instruction
   operand that is a memory reference whose address is ADDR.  ADDR is
   an RTL expression.
555 
556    On some machines, the syntax for a symbolic address depends on
557    the section that the address refers to.  On these machines,
558    define the macro `ENCODE_SECTION_INFO' to store the information
559    into the `symbol_ref', and then check for it here.  */
560 
561 void
lm32_print_operand_address (FILE * file, rtx addr)
563 {
564   switch (GET_CODE (addr))
565     {
566     case REG:
567       fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
568       break;
569 
570     case MEM:
571       output_address (VOIDmode, XEXP (addr, 0));
572       break;
573 
574     case PLUS:
575       {
576 	rtx arg0 = XEXP (addr, 0);
577 	rtx arg1 = XEXP (addr, 1);
578 
579 	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
580 	  {
581 	    if (GET_CODE (arg1) == CONST_INT)
582 	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
583 		       INTVAL (arg1));
584 	    else
585 	      {
586 		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
587 		output_addr_const (file, arg1);
588 		fprintf (file, ")");
589 	      }
590 	  }
591 	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
592 	  output_addr_const (file, addr);
593 	else
594 	  fatal_insn ("bad operand", addr);
595       }
596       break;
597 
598     case SYMBOL_REF:
599       if (SYMBOL_REF_SMALL_P (addr))
600 	{
601 	  fprintf (file, "gp(");
602 	  output_addr_const (file, addr);
603 	  fprintf (file, ")");
604 	}
605       else
606 	fatal_insn ("can't use non gp relative absolute address", addr);
607       break;
608 
609     default:
610       fatal_insn ("invalid addressing mode", addr);
611       break;
612     }
613 }
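
/* Examples of the address syntax produced above (illustrative, with
   <reg> and <sym> standing for an arbitrary base register and symbol):
     register base		(<reg>+0)
     register plus constant	(<reg>+8)
     small-data symbol		gp(<sym>)  */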
614 
615 /* Determine where to put an argument to a function.
616    Value is zero to push the argument on the stack,
617    or a hard register in which to store the argument.
618 
619    CUM is a variable of type CUMULATIVE_ARGS which gives info about
620     the preceding args and about the function being called.
621    ARG is a description of the argument.  */
622 
623 static rtx
lm32_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
625 {
626   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
627 
628   if (arg.end_marker_p ())
629     /* Compute operand 2 of the call insn.  */
630     return GEN_INT (0);
631 
632   if (targetm.calls.must_pass_in_stack (arg))
633     return NULL_RTX;
634 
635   if (!arg.named
636       || *cum + LM32_NUM_REGS2 (arg.mode, arg.type) > LM32_NUM_ARG_REGS)
637     return NULL_RTX;
638 
639   return gen_rtx_REG (arg.mode, *cum + LM32_FIRST_ARG_REG);
640 }
641 
642 static void
lm32_function_arg_advance (cumulative_args_t cum,
			   const function_arg_info &arg)
645 {
646   *get_cumulative_args (cum) += LM32_NUM_REGS2 (arg.mode, arg.type);
647 }
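
/* Informal summary of the two hooks above: each named argument consumes
   LM32_NUM_REGS2 (mode, type) of the LM32_NUM_ARG_REGS argument
   registers starting at LM32_FIRST_ARG_REG; once that budget would be
   exceeded, or for unnamed (variadic) arguments, NULL_RTX is returned
   and the argument goes on the stack instead.  */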
648 
649 HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
651 {
652   HOST_WIDE_INT offset = 0;
653 
654   switch (from)
655     {
656     case ARG_POINTER_REGNUM:
657       switch (to)
658 	{
659 	case FRAME_POINTER_REGNUM:
660 	  offset = 0;
661 	  break;
662 	case STACK_POINTER_REGNUM:
663 	  offset =
664 	    lm32_compute_frame_size (get_frame_size ()) -
665 	    current_frame_info.pretend_size;
666 	  break;
667 	default:
668 	  gcc_unreachable ();
669 	}
670       break;
671     default:
672       gcc_unreachable ();
673     }
674 
675   return offset;
676 }
677 
678 static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v,
			     const function_arg_info &arg,
			     int *pretend_size, int no_rtl)
682 {
683   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
684   int first_anon_arg;
685   tree fntype;
686 
687   fntype = TREE_TYPE (current_function_decl);
688 
689   if (stdarg_p (fntype))
690     first_anon_arg = *cum + LM32_FIRST_ARG_REG;
691   else
692     {
      /* This is the common case: we have been passed details set up
	 for the last named argument, and we want to skip over any
	 registers used in passing that named parameter in order to
	 determine the first register used to pass anonymous
	 arguments.  */
698       int size = arg.promoted_size_in_bytes ();
699 
700       first_anon_arg =
701 	*cum + LM32_FIRST_ARG_REG +
702 	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
703     }
704 
705   if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
706     {
707       int first_reg_offset = first_anon_arg;
708       int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
709       rtx regblock;
710 
711       regblock = gen_rtx_MEM (BLKmode,
712 			      plus_constant (Pmode, arg_pointer_rtx,
713 					     FIRST_PARM_OFFSET (0)));
714       move_block_from_reg (first_reg_offset, regblock, size);
715 
716       *pretend_size = size * UNITS_PER_WORD;
717     }
718 }
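
/* Informal example: for a variadic function such as f (int fmt, ...),
   the hook above dumps the argument registers that follow the last
   named parameter into a block at the incoming argument pointer and
   reports the number of bytes written through *pretend_size.  */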
719 
720 /* Override command line options.  */
721 static void
lm32_option_override (void)
723 {
724   /* We must have sign-extend enabled if barrel-shift isn't.  */
725   if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
726     target_flags |= MASK_SIGN_EXTEND_ENABLED;
727 }
728 
729 /* Return nonzero if this function is known to have a null epilogue.
730    This allows the optimizer to omit jumps to jumps if no stack
731    was created.  */
732 int
lm32_can_use_return (void)
734 {
735   if (!reload_completed)
736     return 0;
737 
738   if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
739     return 0;
740 
741   if (lm32_compute_frame_size (get_frame_size ()) != 0)
742     return 0;
743 
744   return 1;
745 }
746 
/* Support function to determine the return address of the function
   COUNT frames back up the stack.  */
749 rtx
lm32_return_addr_rtx (int count, rtx frame)
751 {
752   rtx r;
753   if (count == 0)
754     {
755       if (!df_regs_ever_live_p (RA_REGNUM))
756 	r = gen_rtx_REG (Pmode, RA_REGNUM);
757       else
758 	{
759 	  r = gen_rtx_MEM (Pmode,
760 			   gen_rtx_PLUS (Pmode, frame,
761 					 GEN_INT (-2 * UNITS_PER_WORD)));
762 	  set_mem_alias_set (r, get_frame_alias_set ());
763 	}
764     }
765   else if (flag_omit_frame_pointer)
766     r = NULL_RTX;
767   else
768     {
769       r = gen_rtx_MEM (Pmode,
770 		       gen_rtx_PLUS (Pmode, frame,
771 				     GEN_INT (-2 * UNITS_PER_WORD)));
772       set_mem_alias_set (r, get_frame_alias_set ());
773     }
774   return r;
775 }
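
/* Informal note (assuming the usual use of RETURN_ADDR_RTX for
   __builtin_return_address): for COUNT == 0 the ra register is returned
   directly when it was never saved, otherwise the saved value is loaded
   from two words below the frame address; deeper frames are only
   supported when a frame pointer is kept.  */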
776 
777 /* Return true if EXP should be placed in the small data section.  */
778 
779 static bool
lm32_in_small_data_p (const_tree exp)
781 {
782   /* We want to merge strings, so we never consider them small data.  */
783   if (TREE_CODE (exp) == STRING_CST)
784     return false;
785 
786   /* Functions are never in the small data area.  Duh.  */
787   if (TREE_CODE (exp) == FUNCTION_DECL)
788     return false;
789 
790   if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
791     {
792       const char *section = DECL_SECTION_NAME (exp);
793       if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
794 	return true;
795     }
796   else
797     {
798       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
799 
800       /* If this is an incomplete type with size 0, then we can't put it
801          in sdata because it might be too big when completed.  */
802       if (size > 0 && size <= g_switch_value)
803 	return true;
804     }
805 
806   return false;
807 }
808 
809 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
810    Assume that the areas do not overlap.  */
811 
812 static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
815 {
816   HOST_WIDE_INT offset, delta;
817   unsigned HOST_WIDE_INT bits;
818   int i;
819   machine_mode mode;
820   rtx *regs;
821 
822   /* Work out how many bits to move at a time.  */
823   switch (alignment)
824     {
825     case 1:
826       bits = 8;
827       break;
828     case 2:
829       bits = 16;
830       break;
831     default:
832       bits = 32;
833       break;
834     }
835 
836   mode = int_mode_for_size (bits, 0).require ();
837   delta = bits / BITS_PER_UNIT;
838 
839   /* Allocate a buffer for the temporary registers.  */
840   regs = XALLOCAVEC (rtx, length / delta);
841 
842   /* Load as many BITS-sized chunks as possible.  */
843   for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
844     {
845       regs[i] = gen_reg_rtx (mode);
846       emit_move_insn (regs[i], adjust_address (src, mode, offset));
847     }
848 
849   /* Copy the chunks to the destination.  */
850   for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
851     emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
852 
853   /* Mop up any left-over bytes.  */
854   if (offset < length)
855     {
856       src = adjust_address (src, BLKmode, offset);
857       dest = adjust_address (dest, BLKmode, offset);
858       move_by_pieces (dest, src, length - offset,
859 		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), RETURN_BEGIN);
860     }
861 }
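
/* Worked example (illustrative): copying 11 bytes with 4-byte alignment
   uses SImode chunks (delta == 4), so two word-sized loads and stores
   cover offsets 0 and 4 and move_by_pieces handles the remaining 3
   bytes; with 1-byte alignment the same copy uses QImode chunks
   throughout.  */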
862 
863 /* Expand string/block move operations.
864 
865    operands[0] is the pointer to the destination.
866    operands[1] is the pointer to the source.
867    operands[2] is the number of bytes to move.
868    operands[3] is the alignment.  */
869 
870 int
lm32_expand_block_move (rtx * operands)
872 {
873   if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
874     {
875       lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
876 			      INTVAL (operands[3]));
877       return 1;
878     }
879   return 0;
880 }
881 
882 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
883    isn't protected by a PIC unspec.  */
884 int
nonpic_symbol_mentioned_p (rtx x)
886 {
887   const char *fmt;
888   int i;
889 
890   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
891       || GET_CODE (x) == PC)
892     return 1;
893 
894   /* We don't want to look into the possible MEM location of a
895      CONST_DOUBLE, since we're not going to use it, in general.  */
896   if (GET_CODE (x) == CONST_DOUBLE)
897     return 0;
898 
899   if (GET_CODE (x) == UNSPEC)
900     return 0;
901 
902   fmt = GET_RTX_FORMAT (GET_CODE (x));
903   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
904     {
905       if (fmt[i] == 'E')
906 	{
907 	  int j;
908 
909 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
910 	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
911 	      return 1;
912 	}
913       else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
914 	return 1;
915     }
916 
917   return 0;
918 }
919 
920 /* Compute a (partial) cost for rtx X.  Return true if the complete
921    cost has been computed, and false if subexpressions should be
922    scanned.  In either case, *TOTAL contains the cost result.  */
923 
924 static bool
lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
927 {
928   int code = GET_CODE (x);
929   bool small_mode;
930 
931   const int arithmetic_latency = 1;
932   const int shift_latency = 1;
933   const int compare_latency = 2;
934   const int multiply_latency = 3;
935   const int load_latency = 3;
936   const int libcall_size_cost = 5;
937 
938   /* Determine if we can handle the given mode size in a single instruction.  */
939   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
940 
941   switch (code)
942     {
943 
944     case PLUS:
945     case MINUS:
946     case AND:
947     case IOR:
948     case XOR:
949     case NOT:
950     case NEG:
951       if (!speed)
952 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
953       else
954 	*total =
955 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
956       break;
957 
958     case COMPARE:
959       if (small_mode)
960 	{
961 	  if (!speed)
962 	    *total = COSTS_N_INSNS (1);
963 	  else
964 	    *total = COSTS_N_INSNS (compare_latency);
965 	}
966       else
967 	{
968 	  /* FIXME. Guessing here.  */
969 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
970 	}
971       break;
972 
973     case ASHIFT:
974     case ASHIFTRT:
975     case LSHIFTRT:
976       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
977 	{
978 	  if (!speed)
979 	    *total = COSTS_N_INSNS (1);
980 	  else
981 	    *total = COSTS_N_INSNS (shift_latency);
982 	}
983       else if (TARGET_BARREL_SHIFT_ENABLED)
984 	{
985 	  /* FIXME: Guessing here.  */
986 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
987 	}
988       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
989 	{
990 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
991 	}
992       else
993 	{
994 	  /* Libcall.  */
995 	  if (!speed)
996 	    *total = COSTS_N_INSNS (libcall_size_cost);
997 	  else
998 	    *total = COSTS_N_INSNS (100);
999 	}
1000       break;
1001 
1002     case MULT:
1003       if (TARGET_MULTIPLY_ENABLED && small_mode)
1004 	{
1005 	  if (!speed)
1006 	    *total = COSTS_N_INSNS (1);
1007 	  else
1008 	    *total = COSTS_N_INSNS (multiply_latency);
1009 	}
1010       else
1011 	{
1012 	  /* Libcall.  */
1013 	  if (!speed)
1014 	    *total = COSTS_N_INSNS (libcall_size_cost);
1015 	  else
1016 	    *total = COSTS_N_INSNS (100);
1017 	}
1018       break;
1019 
1020     case DIV:
1021     case MOD:
1022     case UDIV:
1023     case UMOD:
1024       if (TARGET_DIVIDE_ENABLED && small_mode)
1025 	{
1026 	  if (!speed)
1027 	    *total = COSTS_N_INSNS (1);
1028 	  else
1029 	    {
1030 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1031 		{
1032 		  int cycles = 0;
1033 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1034 
1035 		  while (i)
1036 		    {
1037 		      i >>= 2;
1038 		      cycles++;
1039 		    }
1040 		  if (IN_RANGE (i, 0, 65536))
1041 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1042 		  else
1043 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1044 		  return true;
1045 		}
1046 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1047 		{
1048 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1049 		  return true;
1050 		}
1051 	      else
1052 		{
1053 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1054 		  return false;
1055 		}
1056 	    }
1057 	}
1058       else
1059 	{
1060 	  /* Libcall.  */
1061 	  if (!speed)
1062 	    *total = COSTS_N_INSNS (libcall_size_cost);
1063 	  else
1064 	    *total = COSTS_N_INSNS (100);
1065 	}
1066       break;
1067 
1068     case HIGH:
1069     case LO_SUM:
1070       if (!speed)
1071 	*total = COSTS_N_INSNS (1);
1072       else
1073 	*total = COSTS_N_INSNS (arithmetic_latency);
1074       break;
1075 
1076     case ZERO_EXTEND:
1077       if (MEM_P (XEXP (x, 0)))
1078 	*total = COSTS_N_INSNS (0);
1079       else if (small_mode)
1080 	{
1081 	  if (!speed)
1082 	    *total = COSTS_N_INSNS (1);
1083 	  else
1084 	    *total = COSTS_N_INSNS (arithmetic_latency);
1085 	}
1086       else
1087 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1088       break;
1089 
1090     case CONST_INT:
1091       {
1092 	switch (outer_code)
1093 	  {
1094 	  case HIGH:
1095 	  case LO_SUM:
1096 	    *total = COSTS_N_INSNS (0);
1097 	    return true;
1098 
1099 	  case AND:
1100 	  case XOR:
1101 	  case IOR:
1102 	  case ASHIFT:
1103 	  case ASHIFTRT:
1104 	  case LSHIFTRT:
1105 	  case ROTATE:
1106 	  case ROTATERT:
1107 	    if (satisfies_constraint_L (x))
1108 	      *total = COSTS_N_INSNS (0);
1109 	    else
1110 	      *total = COSTS_N_INSNS (2);
1111 	    return true;
1112 
1113 	  case SET:
1114 	  case PLUS:
1115 	  case MINUS:
1116 	  case COMPARE:
1117 	    if (satisfies_constraint_K (x))
1118 	      *total = COSTS_N_INSNS (0);
1119 	    else
1120 	      *total = COSTS_N_INSNS (2);
1121 	    return true;
1122 
1123 	  case MULT:
1124 	    if (TARGET_MULTIPLY_ENABLED)
1125 	      {
1126 	        if (satisfies_constraint_K (x))
1127 	         *total = COSTS_N_INSNS (0);
1128 	        else
1129 	          *total = COSTS_N_INSNS (2);
1130 		return true;
1131 	      }
1132 	    /* Fall through.  */
1133 
1134 	  default:
1135             if (satisfies_constraint_K (x))
1136 	      *total = COSTS_N_INSNS (1);
1137 	    else
1138 	      *total = COSTS_N_INSNS (2);
1139 	    return true;
1140 	  }
1141       }
1142 
1143     case SYMBOL_REF:
1144     case CONST:
1145       switch (outer_code)
1146 	{
1147 	case HIGH:
1148 	case LO_SUM:
1149 	  *total = COSTS_N_INSNS (0);
1150 	  return true;
1151 
1152 	case MEM:
1153 	case SET:
1154 	  if (g_switch_value)
1155 	    {
1156 	      *total = COSTS_N_INSNS (0);
1157 	      return true;
1158 	    }
1159 	  break;
1160 	}
1161       /* Fall through.  */
1162 
1163     case LABEL_REF:
1164     case CONST_DOUBLE:
1165       *total = COSTS_N_INSNS (2);
1166       return true;
1167 
1168     case SET:
1169       *total = COSTS_N_INSNS (1);
1170       break;
1171 
1172     case MEM:
1173       if (!speed)
1174 	*total = COSTS_N_INSNS (1);
1175       else
1176 	*total = COSTS_N_INSNS (load_latency);
1177       break;
1178 
1179     }
1180 
1181   return false;
1182 }
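
/* Examples (illustrative) of the costs computed above, assuming
   LM32_NUM_REGS (SImode) is 1: when optimizing for speed an SImode PLUS
   costs COSTS_N_INSNS (1); without a barrel shifter, shifting a small
   mode by the constant 7 costs COSTS_N_INSNS (7), one single-bit shift
   per position.  */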
1183 
/* Implement TARGET_CAN_ELIMINATE.  */
1185 
1186 bool
lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1188 {
1189   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1190 }
1191 
1192 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1193 
1194 static bool
lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1196 {
1197    /* (rM) */
1198   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1199     return true;
1200   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1201     return true;
1202 
  /* (rM+literal) */
1204   if (GET_CODE (x) == PLUS
1205      && REG_P (XEXP (x, 0))
1206      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1207          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1208      && GET_CODE (XEXP (x, 1)) == CONST_INT
1209      && satisfies_constraint_K (XEXP ((x), 1)))
1210     return true;
1211 
1212   /* gp(sym)  */
1213   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1214     return true;
1215 
1216   return false;
1217 }
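
/* Examples of addresses accepted above (illustrative): a plain base
   register such as (reg sp); a base register plus a small constant,
   e.g. (plus (reg sp) (const_int 8)), where the constant must satisfy
   constraint K; and a SYMBOL_REF placed in the small data section,
   which is addressed gp-relative.  */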
1218 
/* Check that a move is not memory to memory.  */
1220 
1221 bool
lm32_move_ok (machine_mode mode, rtx operands[2])
{
1223   if (memory_operand (operands[0], mode))
1224     return register_or_zero_operand (operands[1], mode);
1225   return true;
1226 }
1227 
1228 /* Implement TARGET_HARD_REGNO_MODE_OK.  */
1229 
1230 static bool
lm32_hard_regno_mode_ok (unsigned int regno, machine_mode)
1232 {
1233   return G_REG_P (regno);
1234 }
1235 
1236 /* Implement TARGET_MODES_TIEABLE_P.  */
1237 
1238 static bool
lm32_modes_tieable_p (machine_mode mode1, machine_mode mode2)
1240 {
1241   return (GET_MODE_CLASS (mode1) == MODE_INT
1242 	  && GET_MODE_CLASS (mode2) == MODE_INT
1243 	  && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
1244 	  && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
1245 }
1246 
1247 /* Implement TARGET_STARTING_FRAME_OFFSET.  */
1248 
1249 static HOST_WIDE_INT
lm32_starting_frame_offset (void)
1251 {
1252   return UNITS_PER_WORD;
1253 }
1254