1 /* Xstormy16 target functions.
2    Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
3    Free Software Foundation, Inc.
4    Contributed by Red Hat, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "tree.h"
41 #include "expr.h"
42 #include "optabs.h"
43 #include "except.h"
44 #include "function.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm_p.h"
48 #include "langhooks.h"
49 
50 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
51 static void xstormy16_asm_out_constructor (rtx, int);
52 static void xstormy16_asm_out_destructor (rtx, int);
53 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
54 					   HOST_WIDE_INT, tree);
55 
56 static void xstormy16_init_builtins (void);
57 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
58 static bool xstormy16_rtx_costs (rtx, int, int, int *);
59 static int xstormy16_address_cost (rtx);
60 
61 /* Define the information needed to generate branch and scc insns.  This is
62    stored from the compare operation.  */
63 struct rtx_def * xstormy16_compare_op0;
64 struct rtx_def * xstormy16_compare_op1;
65 
66 /* Return 1 if this is a LT, GE, LTU, or GEU operator.  */
67 
68 int
69 xstormy16_ineqsi_operator (register rtx op, enum machine_mode mode)
70 {
71   enum rtx_code code = GET_CODE (op);
72 
73   return ((mode == VOIDmode || GET_MODE (op) == mode)
74 	  && (code == LT || code == GE || code == LTU || code == GEU));
75 }
76 
77 /* Return 1 if this is an EQ or NE operator.  */
78 
79 int
80 equality_operator (register rtx op, enum machine_mode mode)
81 {
82   return ((mode == VOIDmode || GET_MODE (op) == mode)
83 	  && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
84 }
85 
86 /* Return 1 if this is a comparison operator but not an EQ or NE operator.  */
87 
88 int
89 inequality_operator (register rtx op, enum machine_mode mode)
90 {
91   return comparison_operator (op, mode) && ! equality_operator (op, mode);
92 }
93 
94 /* Compute a (partial) cost for rtx X.  Return true if the complete
95    cost has been computed, and false if subexpressions should be
96    scanned.  In either case, *TOTAL contains the cost result.  */
97 
98 static bool
99 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
100 		     int *total)
101 {
102   switch (code)
103     {
104     case CONST_INT:
105       if (INTVAL (x) < 16 && INTVAL (x) >= 0)
106         *total = COSTS_N_INSNS (1) / 2;
107       else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
108 	*total = COSTS_N_INSNS (1);
109       else
110 	*total = COSTS_N_INSNS (2);
111       return true;
112 
113     case CONST_DOUBLE:
114     case CONST:
115     case SYMBOL_REF:
116     case LABEL_REF:
117       *total = COSTS_N_INSNS (2);
118       return true;
119 
120     case MULT:
121       *total = COSTS_N_INSNS (35 + 6);
122       return true;
123     case DIV:
124       *total = COSTS_N_INSNS (51 - 6);
125       return true;
126 
127     default:
128       return false;
129     }
130 }
131 
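/* Worker for the address-cost hook.  A bare constant address is the
   cheapest form, a base register plus offset (PLUS) is the most
   expensive, and a plain base register falls in between.  */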
132 static int
133 xstormy16_address_cost (rtx x)
134 {
135   return (GET_CODE (x) == CONST_INT ? 2
136 	  : GET_CODE (x) == PLUS ? 7
137 	  : 5);
138 }
139 
140 /* Branches are handled as follows:
141 
142    1. HImode compare-and-branches.  The machine supports these
143       natively, so the appropriate pattern is emitted directly.
144 
145    2. SImode EQ and NE.  These are emitted as pairs of HImode
146       compare-and-branches.
147 
148    3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
149       of a SImode subtract followed by a branch (not a compare-and-branch),
150       like this:
151       sub
152       sbc
153       blt
154 
155    4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
156       sub
157       sbc
158       blt
159       or
160       bne
161 */
162 
163 /* Emit a branch of kind CODE to location LOC.  */
164 
165 void
166 xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
167 {
168   rtx op0 = xstormy16_compare_op0;
169   rtx op1 = xstormy16_compare_op1;
170   rtx condition_rtx, loc_ref, branch, cy_clobber;
171   rtvec vec;
172   enum machine_mode mode;
173 
174   mode = GET_MODE (op0);
175   if (mode != HImode && mode != SImode)
176     abort ();
177 
178   if (mode == SImode
179       && (code == GT || code == LE || code == GTU || code == LEU))
180     {
181       int unsigned_p = (code == GTU || code == LEU);
182       int gt_p = (code == GT || code == GTU);
183       rtx lab = NULL_RTX;
184 
185       if (gt_p)
186 	lab = gen_label_rtx ();
187       xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
188       /* This should be generated as a comparison against the temporary
189 	 created by the previous insn, but reload can't handle that.  */
190       xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
191       if (gt_p)
192 	emit_label (lab);
193       return;
194     }
195   else if (mode == SImode
196 	   && (code == NE || code == EQ)
197 	   && op1 != const0_rtx)
198     {
199       rtx lab = NULL_RTX;
200       int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
201       int i;
202 
203       if (code == EQ)
204 	lab = gen_label_rtx ();
205 
206       for (i = 0; i < num_words - 1; i++)
207 	{
208 	  xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
209 						      i * UNITS_PER_WORD);
210 	  xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
211 						      i * UNITS_PER_WORD);
212 	  xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
213 	}
214       xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
215 						  i * UNITS_PER_WORD);
216       xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
217 						  i * UNITS_PER_WORD);
218       xstormy16_emit_cbranch (code, loc);
219 
220       if (code == EQ)
221 	emit_label (lab);
222       return;
223     }
224 
225   /* We can't allow reload to try to generate any reload after a branch,
226      so when some register must match we must make the temporary ourselves.  */
227   if (mode != HImode)
228     {
229       rtx tmp;
230       tmp = gen_reg_rtx (mode);
231       emit_move_insn (tmp, op0);
232       op0 = tmp;
233     }
234 
235   condition_rtx = gen_rtx (code, mode, op0, op1);
236   loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
237   branch = gen_rtx_SET (VOIDmode, pc_rtx,
238 			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
239 					      loc_ref, pc_rtx));
240 
241   cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));
242 
243   if (mode == HImode)
244     vec = gen_rtvec (2, branch, cy_clobber);
245   else if (code == NE || code == EQ)
246     vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
247   else
248     {
249       rtx sub;
250 #if 0
251       sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
252 #else
253       sub = gen_rtx_CLOBBER (SImode, op0);
254 #endif
255       vec = gen_rtvec (3, branch, sub, cy_clobber);
256     }
257 
258   emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
259 }
260 
261 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
262    the arithmetic operation.  Most of the work is done by
263    xstormy16_expand_arith.  */
264 
265 void
266 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
267 			 rtx dest, rtx carry)
268 {
269   rtx op0 = XEXP (comparison, 0);
270   rtx op1 = XEXP (comparison, 1);
271   rtx seq, last_insn;
272   rtx compare;
273 
274   start_sequence ();
275   xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
276   seq = get_insns ();
277   end_sequence ();
278 
279   if (! INSN_P (seq))
280     abort ();
281 
282   last_insn = seq;
283   while (NEXT_INSN (last_insn) != NULL_RTX)
284     last_insn = NEXT_INSN (last_insn);
285 
286   compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
287   PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
288   XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
289   emit_insn (seq);
290 }
291 
292 
293 /* Return the string to output a conditional branch to LABEL, which is
294    the operand number of the label.
295 
296    OP is the conditional expression, or NULL for branch-always.
297 
298    REVERSED is nonzero if we should reverse the sense of the comparison.
299 
300    INSN is the insn.  */
301 
302 char *
303 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
304 {
305   static char string[64];
306   int need_longbranch = (op != NULL_RTX
307 			 ? get_attr_length (insn) == 8
308 			 : get_attr_length (insn) == 4);
309   int really_reversed = reversed ^ need_longbranch;
310   const char *ccode;
311   const char *template;
312   const char *operands;
313   enum rtx_code code;
314 
315   if (! op)
316     {
317       if (need_longbranch)
318 	ccode = "jmpf";
319       else
320 	ccode = "br";
321       sprintf (string, "%s %s", ccode, label);
322       return string;
323     }
324 
325   code = GET_CODE (op);
326 
327   if (GET_CODE (XEXP (op, 0)) != REG)
328     {
329       code = swap_condition (code);
330       operands = "%3,%2";
331     }
332   else
333       operands = "%2,%3";
334 
335   /* Work out which way this really branches.  */
336   if (really_reversed)
337     code = reverse_condition (code);
338 
339   switch (code)
340     {
341     case EQ:   ccode = "z";   break;
342     case NE:   ccode = "nz";  break;
343     case GE:   ccode = "ge";  break;
344     case LT:   ccode = "lt";  break;
345     case GT:   ccode = "gt";  break;
346     case LE:   ccode = "le";  break;
347     case GEU:  ccode = "nc";  break;
348     case LTU:  ccode = "c";   break;
349     case GTU:  ccode = "hi";  break;
350     case LEU:  ccode = "ls";  break;
351 
352     default:
353       abort ();
354     }
355 
356   if (need_longbranch)
357     template = "b%s %s,.+8 | jmpf %s";
358   else
359     template = "b%s %s,%s";
360   sprintf (string, template, ccode, operands, label);
361 
362   return string;
363 }
364 
365 /* Return the string to output a conditional branch to LABEL, which is
366    the operand number of the label, but suitable for the tail of a
367    SImode branch.
368 
369    OP is the conditional expression (OP is never NULL_RTX).
370 
371    REVERSED is nonzero if we should reverse the sense of the comparison.
372 
373    INSN is the insn.  */
374 
375 char *
376 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
377 {
378   static char string[64];
379   int need_longbranch = get_attr_length (insn) >= 8;
380   int really_reversed = reversed ^ need_longbranch;
381   const char *ccode;
382   const char *template;
383   char prevop[16];
384   enum rtx_code code;
385 
386   code = GET_CODE (op);
387 
388   /* Work out which way this really branches.  */
389   if (really_reversed)
390     code = reverse_condition (code);
391 
392   switch (code)
393     {
394     case EQ:   ccode = "z";   break;
395     case NE:   ccode = "nz";  break;
396     case GE:   ccode = "ge";  break;
397     case LT:   ccode = "lt";  break;
398     case GEU:  ccode = "nc";  break;
399     case LTU:  ccode = "c";   break;
400 
401       /* The missing codes above should never be generated.  */
402     default:
403       abort ();
404     }
405 
406   switch (code)
407     {
408     case EQ: case NE:
409       {
410 	int regnum;
411 
412 	if (GET_CODE (XEXP (op, 0)) != REG)
413 	  abort ();
414 
415 	regnum = REGNO (XEXP (op, 0));
416 	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
417       }
418       break;
419 
420     case GE: case LT: case GEU: case LTU:
421       strcpy (prevop, "sbc %2,%3");
422       break;
423 
424     default:
425       abort ();
426     }
427 
428   if (need_longbranch)
429     template = "%s | b%s .+6 | jmpf %s";
430   else
431     template = "%s | b%s %s";
432   sprintf (string, template, prevop, ccode, label);
433 
434   return string;
435 }
436 
437 /* Many machines have some registers that cannot be copied directly to or from
438    memory or even from other types of registers.  An example is the `MQ'
439    register, which on most machines, can only be copied to or from general
440    registers, but not memory.  Some machines allow copying all registers to and
441    from memory, but require a scratch register for stores to some memory
442    locations (e.g., those with symbolic address on the RT, and those with
443    certain symbolic address on the SPARC when compiling PIC).  In some cases,
444    both an intermediate and a scratch register are required.
445 
446    You should define these macros to indicate to the reload phase that it may
447    need to allocate at least one register for a reload in addition to the
448    register to contain the data.  Specifically, if copying X to a register
449    CLASS in MODE requires an intermediate register, you should define
450    `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
451    whose registers can be used as intermediate registers or scratch registers.
452 
453    If copying a register CLASS in MODE to X requires an intermediate or scratch
454    register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
455    largest register class required.  If the requirements for input and output
456    reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
457    instead of defining both macros identically.
458 
459    The values returned by these macros are often `GENERAL_REGS'.  Return
460    `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
461    to or from a register of CLASS in MODE without requiring a scratch register.
462    Do not define this macro if it would always return `NO_REGS'.
463 
464    If a scratch register is required (either with or without an intermediate
465    register), you should define patterns for `reload_inM' or `reload_outM', as
466    required.  These patterns, which will normally be implemented with a
467    `define_expand', should be similar to the `movM' patterns, except that
468    operand 2 is the scratch register.
469 
470    Define constraints for the reload register and scratch register that contain
471    a single register class.  If the original reload register (whose class is
472    CLASS) can meet the constraint given in the pattern, the value returned by
473    these macros is used for the class of the scratch register.  Otherwise, two
474    additional reload registers are required.  Their classes are obtained from
475    the constraints in the insn pattern.
476 
477    X might be a pseudo-register or a `subreg' of a pseudo-register, which could
478    either be in a hard register or in memory.  Use `true_regnum' to find out;
479    it will return -1 if the pseudo is in memory and the hard register number if
480    it is in a register.
481 
482    These macros should not be used in the case where a particular class of
483    registers can only be copied to memory and not to another class of
484    registers.  In that case, secondary reload registers are not needed and
485    would not be helpful.  Instead, a stack location must be used to perform the
486    copy and the `movM' pattern should use memory as an intermediate storage.
487    This case often occurs between floating-point and general registers.  */
488 
489 enum reg_class
490 xstormy16_secondary_reload_class (enum reg_class class,
491 				  enum machine_mode mode,
492 				  rtx x)
493 {
494   /* This chip has the interesting property that only the first eight
495      registers can be moved to/from memory.  */
496   if ((GET_CODE (x) == MEM
497        || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
498 	   && (true_regnum (x) == -1
499 	       || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
500       && ! reg_class_subset_p (class, EIGHT_REGS))
501     return EIGHT_REGS;
502 
503   /* When reloading a PLUS, the carry register will be required
504      unless the inc or dec instructions can be used.  */
505   if (xstormy16_carry_plus_operand (x, mode))
506     return CARRY_REGS;
507 
508   return NO_REGS;
509 }
510 
511 /* Recognize a PLUS that needs the carry register.  */
512 int
513 xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
514 {
515   return (GET_CODE (x) == PLUS
516 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
517 	  && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
518 }
519 
520 /* Detect and error out on out-of-range constants for movhi.  */
521 int
522 xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
523 {
524   if ((GET_CODE (x) == CONST_INT)
525    && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
526     error ("Constant halfword load operand out of range.");
527   return general_operand (x, mode);
528 }
529 
530 /* Detect and error out on out-of-range constants for addhi and subhi.  */
531 int
532 xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
533 {
534   if ((GET_CODE (x) == CONST_INT)
535    && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
536     error ("Constant arithmetic operand out of range.");
537   return nonmemory_operand (x, mode);
538 }
539 
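/* Prefer EIGHT_REGS when reloading a memory operand into GENERAL_REGS,
   since only the first eight registers can be moved to and from memory
   directly.  */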
540 enum reg_class
541 xstormy16_preferred_reload_class (rtx x, enum reg_class class)
542 {
543   if (class == GENERAL_REGS
544       && GET_CODE (x) == MEM)
545     return EIGHT_REGS;
546 
547   return class;
548 }
549 
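/* LEGITIMATE_ADDRESS_INTEGER_P tests whether X is a CONST_INT that,
   added to OFFSET, fits in a signed 12-bit displacement.
   LEGITIMATE_ADDRESS_CONST_INT_P tests whether X plus OFFSET is an
   acceptable absolute address: nonnegative, below 0x8000, and either
   in the low 0x100 bytes or at 0x7F00 and above.  */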
550 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
551  (GET_CODE (X) == CONST_INT						\
552   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
553 
554 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			 \
555  (GET_CODE (X) == CONST_INT						 \
556   && INTVAL (X) + (OFFSET) >= 0						 \
557   && INTVAL (X) + (OFFSET) < 0x8000					 \
558   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
559 
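/* Return nonzero if X is a valid address: an absolute constant accepted
   by LEGITIMATE_ADDRESS_CONST_INT_P, a base register (optionally plus a
   12-bit signed offset), or a base register with pre/post modification.
   If STRICT is nonzero, only hard registers are acceptable as base
   registers.  */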
560 int
561 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
562 				rtx x, int strict)
563 {
564   if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
565     return 1;
566 
567   if (GET_CODE (x) == PLUS
568       && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
569     x = XEXP (x, 0);
570 
571   if ((GET_CODE (x) == PRE_MODIFY
572        && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
573       || GET_CODE (x) == POST_INC
574       || GET_CODE (x) == PRE_DEC)
575     x = XEXP (x, 0);
576 
577   if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
578       && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
579     return 1;
580 
581   return 0;
582 }
583 
584 /* Return nonzero if memory address X (an RTX) can have different
585    meanings depending on the machine mode of the memory reference it
586    is used for or if the address is valid for some modes but not
587    others.
588 
589    Autoincrement and autodecrement addresses typically have mode-dependent
590    effects because the amount of the increment or decrement is the size of the
591    operand being addressed.  Some machines have other mode-dependent addresses.
592    Many RISC machines have no mode-dependent addresses.
593 
594    You may assume that ADDR is a valid address for the machine.
595 
596    On this chip, this is true if the address is valid with an offset
597    of 0 but not of 6, because in that case it cannot be used as an
598    address for DImode or DFmode, or if the address is a post-increment
599    or pre-decrement address.  */
600 int
601 xstormy16_mode_dependent_address_p (rtx x)
602 {
603   if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
604       && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
605     return 1;
606 
607   if (GET_CODE (x) == PLUS
608       && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
609       && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
610     return 1;
611 
612   if (GET_CODE (x) == PLUS)
613     x = XEXP (x, 0);
614 
615   if (GET_CODE (x) == POST_INC
616       || GET_CODE (x) == PRE_DEC)
617     return 1;
618 
619   return 0;
620 }
621 
622 /* A C expression that defines the optional machine-dependent constraint
623    letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
624    types of operands, usually memory references, for the target machine.
625    Normally this macro will not be defined.  If it is required for a particular
626    target machine, it should return 1 if VALUE corresponds to the operand type
627    represented by the constraint letter C.  If C is not defined as an extra
628    constraint, the value returned should be 0 regardless of VALUE.  */
629 int
630 xstormy16_extra_constraint_p (rtx x, int c)
631 {
632   switch (c)
633     {
634       /* 'Q' is for pushes.  */
635     case 'Q':
636       return (GET_CODE (x) == MEM
637 	      && GET_CODE (XEXP (x, 0)) == POST_INC
638 	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
639 
640       /* 'R' is for pops.  */
641     case 'R':
642       return (GET_CODE (x) == MEM
643 	      && GET_CODE (XEXP (x, 0)) == PRE_DEC
644 	      && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
645 
646       /* 'S' is for immediate memory addresses.  */
647     case 'S':
648       return (GET_CODE (x) == MEM
649 	      && GET_CODE (XEXP (x, 0)) == CONST_INT
650 	      && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
651 
652       /* 'T' is for Rx.  */
653     case 'T':
654       /* Not implemented yet.  */
655       return 0;
656 
657       /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
658 	 for allocating a scratch register for 32-bit shifts.  */
659     case 'U':
660       return (GET_CODE (x) == CONST_INT
661 	      && (INTVAL (x) < 2 || INTVAL (x) > 15));
662 
663       /* 'Z' is for CONST_INT value zero.  This is for adding zero to
664 	 a register in addhi3, which would otherwise require a carry.  */
665     case 'Z':
666       return (GET_CODE (x) == CONST_INT
667 	      && (INTVAL (x) == 0));
668 
669     default:
670       return 0;
671     }
672 }
673 
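/* Return 1 if X is a memory operand whose address does not use the
   base-plus-offset (PLUS) form.  */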
674 int
675 short_memory_operand (rtx x, enum machine_mode mode)
676 {
677   if (! memory_operand (x, mode))
678     return 0;
679   return (GET_CODE (XEXP (x, 0)) != PLUS);
680 }
681 
682 int
683 nonimmediate_nonstack_operand (rtx op, enum machine_mode mode)
684 {
685   /* 'Q' is for pushes, 'R' for pops.  */
686   return (nonimmediate_operand (op, mode)
687 	  && ! xstormy16_extra_constraint_p (op, 'Q')
688 	  && ! xstormy16_extra_constraint_p (op, 'R'));
689 }
690 
691 /* Splitter for the 'move' patterns, for modes not directly implemented
692    by hardware.  Emit insns to copy a value of mode MODE from SRC to
693    DEST.
694 
695    This function is only called when reload_completed.
696    */
697 
698 void
699 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
700 {
701   int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
702   int direction, end, i;
703   int src_modifies = 0;
704   int dest_modifies = 0;
705   int src_volatile = 0;
706   int dest_volatile = 0;
707   rtx mem_operand;
708   rtx auto_inc_reg_rtx = NULL_RTX;
709 
710   /* Check initial conditions.  */
711   if (! reload_completed
712       || mode == QImode || mode == HImode
713       || ! nonimmediate_operand (dest, mode)
714       || ! general_operand (src, mode))
715     abort ();
716 
717   /* This case is not supported below, and shouldn't be generated.  */
718   if (GET_CODE (dest) == MEM
719       && GET_CODE (src) == MEM)
720     abort ();
721 
722   /* This case is very very bad after reload, so trap it now.  */
723   if (GET_CODE (dest) == SUBREG
724       || GET_CODE (src) == SUBREG)
725     abort ();
726 
727   /* The general idea is to copy by words, offsetting the source and
728      destination.  Normally the least-significant word will be copied
729      first, but for pre-dec operations it's better to copy the
730      most-significant word first.  Only one operand can be a pre-dec
731      or post-inc operand.
732 
733      It's also possible that the copy overlaps so that the direction
734      must be reversed.  */
735   direction = 1;
736 
737   if (GET_CODE (dest) == MEM)
738     {
739       mem_operand = XEXP (dest, 0);
740       dest_modifies = side_effects_p (mem_operand);
741       if (auto_inc_p (mem_operand))
742         auto_inc_reg_rtx = XEXP (mem_operand, 0);
743       dest_volatile = MEM_VOLATILE_P (dest);
744       if (dest_volatile)
745 	{
746 	  dest = copy_rtx (dest);
747 	  MEM_VOLATILE_P (dest) = 0;
748 	}
749     }
750   else if (GET_CODE (src) == MEM)
751     {
752       mem_operand = XEXP (src, 0);
753       src_modifies = side_effects_p (mem_operand);
754       if (auto_inc_p (mem_operand))
755         auto_inc_reg_rtx = XEXP (mem_operand, 0);
756       src_volatile = MEM_VOLATILE_P (src);
757       if (src_volatile)
758 	{
759 	  src = copy_rtx (src);
760 	  MEM_VOLATILE_P (src) = 0;
761 	}
762     }
763   else
764     mem_operand = NULL_RTX;
765 
766   if (mem_operand == NULL_RTX)
767     {
768       if (GET_CODE (src) == REG
769 	  && GET_CODE (dest) == REG
770 	  && reg_overlap_mentioned_p (dest, src)
771 	  && REGNO (dest) > REGNO (src))
772 	direction = -1;
773     }
774   else if (GET_CODE (mem_operand) == PRE_DEC
775       || (GET_CODE (mem_operand) == PLUS
776 	  && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
777     direction = -1;
778   else if (GET_CODE (src) == MEM
779 	   && reg_overlap_mentioned_p (dest, src))
780     {
781       int regno;
782       if (GET_CODE (dest) != REG)
783 	abort ();
784       regno = REGNO (dest);
785 
786       if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
787 	abort ();
788 
789       if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
790 	direction = -1;
791       else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
792 				  mem_operand, 0))
793 	direction = 1;
794       else
795 	/* This means something like
796 	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
797 	   which we'd need to support by doing the set of the second word
798 	   last.  */
799 	abort ();
800     }
801 
802   end = direction < 0 ? -1 : num_words;
803   for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
804     {
805       rtx w_src, w_dest, insn;
806 
807       if (src_modifies)
808 	w_src = gen_rtx_MEM (word_mode, mem_operand);
809       else
810 	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
811       if (src_volatile)
812 	MEM_VOLATILE_P (w_src) = 1;
813       if (dest_modifies)
814 	w_dest = gen_rtx_MEM (word_mode, mem_operand);
815       else
816 	w_dest = simplify_gen_subreg (word_mode, dest, mode,
817 				      i * UNITS_PER_WORD);
818       if (dest_volatile)
819 	MEM_VOLATILE_P (w_dest) = 1;
820 
821       /* The simplify_subreg calls must always be able to simplify.  */
822       if (GET_CODE (w_src) == SUBREG
823 	  || GET_CODE (w_dest) == SUBREG)
824 	abort ();
825 
826       insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
827       if (auto_inc_reg_rtx)
828         REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
829                                             auto_inc_reg_rtx,
830 					    REG_NOTES (insn));
831     }
832 }
833 
834 /* Expander for the 'move' patterns.  Emit insns to copy a value of
835    mode MODE from SRC to DEST.  */
836 
837 void
838 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
839 {
840   if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
841     {
842       rtx pmv      = XEXP (dest, 0);
843       rtx dest_reg = XEXP (pmv, 0);
844       rtx dest_mod = XEXP (pmv, 1);
845       rtx set      = gen_rtx_SET (Pmode, dest_reg, dest_mod);
846       rtx clobber  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
847 
848       dest = gen_rtx_MEM (mode, dest_reg);
849       emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
850     }
851   else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
852     {
853       rtx pmv     = XEXP (src, 0);
854       rtx src_reg = XEXP (pmv, 0);
855       rtx src_mod = XEXP (pmv, 1);
856       rtx set     = gen_rtx_SET (Pmode, src_reg, src_mod);
857       rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
858 
859       src = gen_rtx_MEM (mode, src_reg);
860       emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
861     }
862 
863   /* There are only limited immediate-to-memory move instructions.  */
864   if (! reload_in_progress
865       && ! reload_completed
866       && GET_CODE (dest) == MEM
867       && (GET_CODE (XEXP (dest, 0)) != CONST_INT
868 	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
869       && GET_CODE (src) != REG
870       && GET_CODE (src) != SUBREG)
871     src = copy_to_mode_reg (mode, src);
872 
873   /* Don't emit something we would immediately split.  */
874   if (reload_completed
875       && mode != HImode && mode != QImode)
876     {
877       xstormy16_split_move (mode, dest, src);
878       return;
879     }
880 
881   emit_insn (gen_rtx_SET (VOIDmode, dest, src));
882 }
883 
884 
885 /* Stack Layout:
886 
887    The stack is laid out as follows:
888 
889 SP->
890 FP->	Local variables
891 	Register save area (up to 4 words)
892 	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
893 
894 AP->	Return address (two words)
895 	9th procedure parameter word
896 	10th procedure parameter word
897 	...
898 	last procedure parameter word
899 
900   The frame pointer location is tuned to make it most likely that all
901   parameters and local variables can be accessed using a load-indexed
902   instruction.  */
903 
904 /* A structure to describe the layout.  */
905 struct xstormy16_stack_layout
906 {
907   /* Size of the topmost three items on the stack.  */
908   int locals_size;
909   int register_save_size;
910   int stdarg_save_size;
911   /* Sum of the above items.  */
912   int frame_size;
913   /* Various offsets.  */
914   int first_local_minus_ap;
915   int sp_minus_fp;
916   int fp_minus_ap;
917 };
918 
919 /* Does REGNO need to be saved?  */
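/* A register needs saving when it is call-saved and has been used, or,
   in an interrupt function (IFUN), when it is a non-fixed call-clobbered
   register other than the carry register that either has been used or
   might be clobbered by a called function (i.e. this is not a leaf
   function).  */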
920 #define REG_NEEDS_SAVE(REGNUM, IFUN)					\
921   ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM])			\
922    || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
923        && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)			\
924        && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
925 
926 /* Compute the stack layout.  */
927 struct xstormy16_stack_layout
928 xstormy16_compute_stack_layout (void)
929 {
930   struct xstormy16_stack_layout layout;
931   int regno;
932   const int ifun = xstormy16_interrupt_function_p ();
933 
934   layout.locals_size = get_frame_size ();
935 
936   layout.register_save_size = 0;
937   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
938     if (REG_NEEDS_SAVE (regno, ifun))
939       layout.register_save_size += UNITS_PER_WORD;
940 
941   if (current_function_stdarg)
942     layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
943   else
944     layout.stdarg_save_size = 0;
945 
946   layout.frame_size = (layout.locals_size
947 		       + layout.register_save_size
948 		       + layout.stdarg_save_size);
949 
950   if (current_function_args_size <= 2048 && current_function_args_size != -1)
951     {
952       if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
953 	  + current_function_args_size <= 2048)
954 	layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
955       else
956 	layout.fp_minus_ap = 2048 - current_function_args_size;
957     }
958   else
959     layout.fp_minus_ap = (layout.stdarg_save_size
960 			  + layout.register_save_size
961 			  + INCOMING_FRAME_SP_OFFSET);
962   layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
963 			- layout.fp_minus_ap);
964   layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
965   return layout;
966 }
967 
968 /* Determine how all the special registers get eliminated.  */
969 int
970 xstormy16_initial_elimination_offset (int from, int to)
971 {
972   struct xstormy16_stack_layout layout;
973   int result;
974 
975   layout = xstormy16_compute_stack_layout ();
976 
977   if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
978     result = layout.sp_minus_fp - layout.locals_size;
979   else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
980     result = -layout.locals_size;
981   else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
982     result = -layout.fp_minus_ap;
983   else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
984     result = -(layout.sp_minus_fp + layout.fp_minus_ap);
985   else
986     abort ();
987 
988   return result;
989 }
990 
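/* Emit a post-reload HImode addition of SRC0 and SRC1 into DEST,
   wrapped in a PARALLEL that clobbers register 16 (the carry flag),
   and return the insn.  */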
991 static rtx
992 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
993 {
994   rtx set, clobber, insn;
995 
996   set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
997   clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
998   insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
999   return insn;
1000 }
1001 
1002 /* Called after register allocation to add any instructions needed for
1003    the prologue.  Using a prologue insn is favored compared to putting
1004    all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1005    since it allows the scheduler to intermix instructions with the
1006    saves of the caller saved registers.  In some cases, it might be
1007    necessary to emit a barrier instruction as the last insn to prevent
1008    such scheduling.
1009 
1010    Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1011    so that the debug info generation code can handle them properly.  */
1012 void
1013 xstormy16_expand_prologue (void)
1014 {
1015   struct xstormy16_stack_layout layout;
1016   int regno;
1017   rtx insn;
1018   rtx mem_push_rtx;
1019   const int ifun = xstormy16_interrupt_function_p ();
1020 
1021   mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1022   mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1023 
1024   layout = xstormy16_compute_stack_layout ();
1025 
1026   if (layout.locals_size >= 32768)
1027     error ("Local variable memory requirements exceed capacity.");
1028 
1029   /* Save the argument registers if necessary.  */
1030   if (layout.stdarg_save_size)
1031     for (regno = FIRST_ARGUMENT_REGISTER;
1032 	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1033 	 regno++)
1034       {
1035 	rtx dwarf;
1036 	rtx reg = gen_rtx_REG (HImode, regno);
1037 
1038 	insn = emit_move_insn (mem_push_rtx, reg);
1039 	RTX_FRAME_RELATED_P (insn) = 1;
1040 
1041 	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1042 
1043 	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1044 					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
1045 					     reg);
1046 	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1047 					     plus_constant (stack_pointer_rtx,
1048 							    GET_MODE_SIZE (Pmode)));
1049 	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1050 					      dwarf,
1051 					      REG_NOTES (insn));
1052 	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1053 	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1054       }
1055 
1056   /* Push each of the registers to save.  */
1057   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1058     if (REG_NEEDS_SAVE (regno, ifun))
1059       {
1060 	rtx dwarf;
1061 	rtx reg = gen_rtx_REG (HImode, regno);
1062 
1063 	insn = emit_move_insn (mem_push_rtx, reg);
1064 	RTX_FRAME_RELATED_P (insn) = 1;
1065 
1066 	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1067 
1068 	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1069 					     gen_rtx_MEM (Pmode, stack_pointer_rtx),
1070 					     reg);
1071 	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1072 					     plus_constant (stack_pointer_rtx,
1073 							    GET_MODE_SIZE (Pmode)));
1074 	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1075 					      dwarf,
1076 					      REG_NOTES (insn));
1077 	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1078 	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1079       }
1080 
1081   /* It's just possible that the SP here might be what we need for
1082      the new FP...  */
1083   if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1084     emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1085 
1086   /* Allocate space for local variables.  */
1087   if (layout.locals_size)
1088     {
1089       insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1090 				     GEN_INT (layout.locals_size));
1091       RTX_FRAME_RELATED_P (insn) = 1;
1092     }
1093 
1094   /* Set up the frame pointer, if required.  */
1095   if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1096     {
1097       insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1098 
1099       if (layout.sp_minus_fp)
1100 	emit_addhi3_postreload (hard_frame_pointer_rtx,
1101 				hard_frame_pointer_rtx,
1102 				GEN_INT (-layout.sp_minus_fp));
1103     }
1104 }
1105 
1106 /* Do we need an epilogue at all?  */
1107 int
1108 direct_return (void)
1109 {
1110   return (reload_completed
1111 	  && xstormy16_compute_stack_layout ().frame_size == 0);
1112 }
1113 
1114 /* Called after register allocation to add any instructions needed for
1115    the epilogue.  Using an epilogue insn is favored compared to putting
1116    all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1117    since it allows the scheduler to intermix instructions with the
1118    saves of the caller saved registers.  In some cases, it might be
1119    necessary to emit a barrier instruction as the last insn to prevent
1120    such scheduling.  */
1121 
1122 void
1123 xstormy16_expand_epilogue (void)
1124 {
1125   struct xstormy16_stack_layout layout;
1126   rtx mem_pop_rtx, insn;
1127   int regno;
1128   const int ifun = xstormy16_interrupt_function_p ();
1129 
1130   mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1131   mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1132 
1133   layout = xstormy16_compute_stack_layout ();
1134 
1135   /* Pop the stack for the locals.  */
1136   if (layout.locals_size)
1137     {
1138       if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1139 	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1140       else
1141         {
1142 	  insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1143 					 GEN_INT (- layout.locals_size));
1144 	  RTX_FRAME_RELATED_P (insn) = 1;
1145 	}
1146     }
1147 
1148   /* Restore any call-saved registers.  */
1149   for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1150     if (REG_NEEDS_SAVE (regno, ifun))
1151       {
1152         rtx dwarf;
1153 
1154 	insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1155 	RTX_FRAME_RELATED_P (insn) = 1;
1156 	dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
1157 			     plus_constant (stack_pointer_rtx,
1158 					    -GET_MODE_SIZE (Pmode)));
1159 	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1160 					      dwarf,
1161 					      REG_NOTES (insn));
1162       }
1163 
1164   /* Pop the stack for the stdarg save area.  */
1165   if (layout.stdarg_save_size)
1166     {
1167       insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1168 				     GEN_INT (- layout.stdarg_save_size));
1169       RTX_FRAME_RELATED_P (insn) = 1;
1170     }
1171 
1172   /* Return.  */
1173   if (ifun)
1174     emit_jump_insn (gen_return_internal_interrupt ());
1175   else
1176     emit_jump_insn (gen_return_internal ());
1177 }
1178 
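/* Worker for EPILOGUE_USES: a call-used register that had to be saved
   is restored by the epilogue, so treat it as used there.  */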
1179 int
1180 xstormy16_epilogue_uses (int regno)
1181 {
1182   if (reload_completed && call_used_regs[regno])
1183     {
1184       const int ifun = xstormy16_interrupt_function_p ();
1185       return REG_NEEDS_SAVE (regno, ifun);
1186     }
1187   return 0;
1188 }
1189 
1190 void
1191 xstormy16_function_profiler (void)
1192 {
1193   sorry ("function_profiler support");
1194 }
1195 
1196 
1197 /* Return an updated summarizer variable CUM to advance past an
1198    argument in the argument list.  The values MODE, TYPE and NAMED
1199    describe that argument.  Once this is done, the variable CUM is
1200    suitable for analyzing the *following* argument with
1201    `FUNCTION_ARG', etc.
1202 
1203    This function need not do anything if the argument in question was
1204    passed on the stack.  The compiler knows how to track the amount of
1205    stack space used for arguments without any special help.  However,
1206    it makes life easier for xstormy16_build_va_list if it does update
1207    the word count.  */
1208 CUMULATIVE_ARGS
1209 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1210 				tree type, int named ATTRIBUTE_UNUSED)
1211 {
1212   /* If an argument would otherwise be passed partially in registers,
1213      and partially on the stack, the whole of it is passed on the
1214      stack.  */
1215   if (cum < NUM_ARGUMENT_REGISTERS
1216       && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1217     cum = NUM_ARGUMENT_REGISTERS;
1218 
1219   cum += XSTORMY16_WORD_SIZE (type, mode);
1220 
1221   return cum;
1222 }
1223 
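/* Return the register in which to pass the argument described by MODE
   and TYPE, or NULL if it must be passed on the stack.  CUM is the
   number of argument words already used; the register used is numbered
   CUM + 2, so the argument registers start at hard register 2.  */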
1224 rtx
1225 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1226 			tree type, int named ATTRIBUTE_UNUSED)
1227 {
1228   if (mode == VOIDmode)
1229     return const0_rtx;
1230   if (MUST_PASS_IN_STACK (mode, type)
1231       || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1232     return 0;
1233   return gen_rtx_REG (mode, cum + 2);
1234 }
1235 
1236 /* Do any needed setup for a variadic function.  CUM has not been updated
1237    for the last named argument which has type TYPE and mode MODE.  */
1238 void
1239 xstormy16_setup_incoming_varargs (CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED,
1240 				  int int_mode ATTRIBUTE_UNUSED,
1241 				  tree type ATTRIBUTE_UNUSED,
1242 				  int *pretend_size ATTRIBUTE_UNUSED)
1243 {
1244 }
1245 
1246 /* Build the va_list type.
1247 
1248    For this chip, va_list is a record containing a counter and a pointer.
1249    The counter is of type 'int' and indicates how many bytes
1250    have been used to date.  The pointer indicates the stack position
1251    for arguments that have not been passed in registers.
1252    To keep the layout nice, the pointer is first in the structure.  */
1253 
1254 static tree
1255 xstormy16_build_builtin_va_list (void)
1256 {
1257   tree f_1, f_2, record, type_decl;
1258 
1259   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1260   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1261 
1262   f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1263 		      ptr_type_node);
1264   f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1265 		      unsigned_type_node);
1266 
1267   DECL_FIELD_CONTEXT (f_1) = record;
1268   DECL_FIELD_CONTEXT (f_2) = record;
1269 
1270   TREE_CHAIN (record) = type_decl;
1271   TYPE_NAME (record) = type_decl;
1272   TYPE_FIELDS (record) = f_1;
1273   TREE_CHAIN (f_1) = f_2;
1274 
1275   layout_type (record);
1276 
1277   return record;
1278 }
1279 
1280 /* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
1281    is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
1282    variable to initialize.  NEXTARG is the machine independent notion of the
1283    'next' argument after the variable arguments.  */
1284 void
1285 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1286 {
1287   tree f_base, f_count;
1288   tree base, count;
1289   tree t;
1290 
1291   if (xstormy16_interrupt_function_p ())
1292     error ("cannot use va_start in interrupt function");
1293 
1294   f_base = TYPE_FIELDS (va_list_type_node);
1295   f_count = TREE_CHAIN (f_base);
1296 
1297   base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1298   count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
1299 
1300   t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1301   t = build (PLUS_EXPR, TREE_TYPE (base), t,
1302 	     build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
1303   t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
1304   TREE_SIDE_EFFECTS (t) = 1;
1305   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1306 
1307   t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1308 	     build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
1309   TREE_SIDE_EFFECTS (t) = 1;
1310   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1311 }
1312 
1313 /* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
1314    of type va_list as a tree, TYPE is the type passed to va_arg.
1315    Note:  This algorithm is documented in stormy-abi.  */
1316 
1317 rtx
1318 xstormy16_expand_builtin_va_arg (tree valist, tree type)
1319 {
1320   tree f_base, f_count;
1321   tree base, count;
1322   rtx count_rtx, addr_rtx, r;
1323   rtx lab_gotaddr, lab_fromstack;
1324   tree t;
1325   int size, size_of_reg_args, must_stack;
1326   tree size_tree, count_plus_size;
1327   rtx count_plus_size_rtx;
1328 
1329   f_base = TYPE_FIELDS (va_list_type_node);
1330   f_count = TREE_CHAIN (f_base);
1331 
1332   base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
1333   count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
1334 
1335   must_stack = MUST_PASS_IN_STACK (TYPE_MODE (type), type);
1336   size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1337 
1338   size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1339 
1340   count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
1341   lab_gotaddr = gen_label_rtx ();
1342   lab_fromstack = gen_label_rtx ();
1343   addr_rtx = gen_reg_rtx (Pmode);
1344 
1345   if (!must_stack)
1346     {
1347       count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1348       count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
1349       emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
1350 			       GTU, const1_rtx, HImode, 1, lab_fromstack);
1351 
1352       t = build (PLUS_EXPR, ptr_type_node, base, count);
1353       r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1354       if (r != addr_rtx)
1355 	emit_move_insn (addr_rtx, r);
1356 
1357       emit_jump_insn (gen_jump (lab_gotaddr));
1358       emit_barrier ();
1359       emit_label (lab_fromstack);
1360     }
1361 
1362   /* Arguments larger than a word might need to skip over some
1363      registers, since arguments are either passed entirely in
1364      registers or entirely on the stack.  */
1365   size = PUSH_ROUNDING (int_size_in_bytes (type));
1366   if (size > 2 || size < 0 || must_stack)
1367     {
1368       rtx lab_notransition = gen_label_rtx ();
1369       emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
1370 						   * UNITS_PER_WORD),
1371 			       GEU, const1_rtx, HImode, 1, lab_notransition);
1372 
1373       t = build (MODIFY_EXPR, TREE_TYPE (count), count,
1374 		 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
1375       TREE_SIDE_EFFECTS (t) = 1;
1376       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1377 
1378       emit_label (lab_notransition);
1379     }
1380 
1381   t = build (PLUS_EXPR, sizetype, size_tree,
1382 	     build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1383 			   + INCOMING_FRAME_SP_OFFSET),
1384 			  -1));
1385   t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
1386   t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
1387   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
1388   if (r != addr_rtx)
1389     emit_move_insn (addr_rtx, r);
1390 
1391   emit_label (lab_gotaddr);
1392 
1393   count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
1394   t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
1395   TREE_SIDE_EFFECTS (t) = 1;
1396   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1397 
1398   return addr_rtx;
1399 }
1400 
1401 /* Initialize the variable parts of a trampoline.  ADDR is an RTX for
1402    the address of the trampoline; FNADDR is an RTX for the address of
1403    the nested function; STATIC_CHAIN is an RTX for the static chain
1404    value that should be passed to the function when it is called.  */
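/* The code below writes a four-word trampoline:

     word 0: 0x3130 | STATIC_CHAIN_REGNUM  (presumably a 'mov <chain reg>,#imm' opcode)
     word 1: STATIC_CHAIN                  (the immediate for that move)
     word 2: 0x0200 | (FNADDR & 0xFF)      (presumably the 'jmpf' opcode word)
     word 3: FNADDR >> 8                   (the rest of the target address)

   The opcode interpretation is inferred from the constants used here,
   not from the instruction-set manual.  */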
1405 void
1406 xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
1407 {
1408   rtx reg_addr = gen_reg_rtx (Pmode);
1409   rtx temp = gen_reg_rtx (HImode);
1410   rtx reg_fnaddr = gen_reg_rtx (HImode);
1411   rtx reg_addr_mem;
1412 
1413   reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
1414 
1415   emit_move_insn (reg_addr, addr);
1416   emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1417   emit_move_insn (reg_addr_mem, temp);
1418   emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1419   emit_move_insn (temp, static_chain);
1420   emit_move_insn (reg_addr_mem, temp);
1421   emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1422   emit_move_insn (reg_fnaddr, fnaddr);
1423   emit_move_insn (temp, reg_fnaddr);
1424   emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1425   emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1426   emit_move_insn (reg_addr_mem, temp);
1427   emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1428   emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1429   emit_move_insn (reg_addr_mem, reg_fnaddr);
1430 }
1431 
1432 /* Create an RTX representing the place where a function returns a
1433    value of data type VALTYPE.  VALTYPE is a tree node representing a
1434    data type.  Write `TYPE_MODE (VALTYPE)' to get the machine mode
1435    used to represent that type.  On many machines, only the mode is
1436    relevant.  (Actually, on most machines, scalar values are returned
1437    in the same place regardless of mode).
1438 
1439    If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
1440    rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.
1441 
1442    If the precise function being called is known, FUNC is a tree node
1443    (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer.  This makes it
1444    possible to use a different value-returning convention for specific
1445    functions when all their calls are known.
1446 
1447    `FUNCTION_VALUE' is not used for return values with aggregate data types,
1448    because these are returned in another way.  See `STRUCT_VALUE_REGNUM' and
1449    related macros.  */
1450 rtx
1451 xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
1452 {
1453   enum machine_mode mode;
1454   mode = TYPE_MODE (valtype);
1455   PROMOTE_MODE (mode, 0, valtype);
1456   return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1457 }
1458 
1459 /* A C compound statement that outputs the assembler code for a thunk function,
1460    used to implement C++ virtual function calls with multiple inheritance.  The
1461    thunk acts as a wrapper around a virtual function, adjusting the implicit
1462    object parameter before handing control off to the real function.
1463 
1464    First, emit code to add the integer DELTA to the location that contains the
1465    incoming first argument.  Assume that this argument contains a pointer, and
1466    is the one used to pass the `this' pointer in C++.  This is the incoming
1467    argument *before* the function prologue, e.g. `%o0' on a sparc.  The
1468    addition must preserve the values of all other incoming arguments.
1469 
1470    After the addition, emit code to jump to FUNCTION, which is a
1471    `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
1472    the return address.  Hence returning from FUNCTION will return to whoever
1473    called the current `thunk'.
1474 
1475    The effect must be as if @var{function} had been called directly
1476    with the adjusted first argument.  This macro is responsible for
1477    emitting all of the code for a thunk function;
1478    TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1479    not invoked.
1480 
1481    The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
1482    extracted from it.)  It might possibly be useful on some targets, but
1483    probably not.  */
1484 
1485 static void
1486 xstormy16_asm_output_mi_thunk (FILE *file,
1487 			       tree thunk_fndecl ATTRIBUTE_UNUSED,
1488 			       HOST_WIDE_INT delta,
1489 			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1490 			       tree function)
1491 {
1492   int regnum = FIRST_ARGUMENT_REGISTER;
1493 
1494   /* There might be a hidden first argument for a returned structure.  */
1495   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
1496     regnum += 1;
1497 
1498   fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1499   fputs ("\tjmpf ", file);
1500   assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1501   putc ('\n', file);
1502 }
1503 
1504 /* Output constructors and destructors.  Just like
1505    default_named_section_asm_out_* but don't set the sections writable.  */
1506 #undef  TARGET_ASM_CONSTRUCTOR
1507 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1508 #undef  TARGET_ASM_DESTRUCTOR
1509 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1510 
1511 static void
1512 xstormy16_asm_out_destructor (rtx symbol, int priority)
1513 {
1514   const char *section = ".dtors";
1515   char buf[16];
1516 
1517   /* ??? This only works reliably with the GNU linker.   */
1518   if (priority != DEFAULT_INIT_PRIORITY)
1519     {
1520       sprintf (buf, ".dtors.%.5u",
1521 	       /* Invert the numbering so the linker puts us in the proper
1522 		  order; constructors are run from right to left, and the
1523 		  linker sorts in increasing order.  */
1524 	       MAX_INIT_PRIORITY - priority);
1525       section = buf;
1526     }
1527 
1528   named_section_flags (section, 0);
1529   assemble_align (POINTER_SIZE);
1530   assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1531 }
1532 
1533 static void
1534 xstormy16_asm_out_constructor (rtx symbol, int priority)
1535 {
1536   const char *section = ".ctors";
1537   char buf[16];
1538 
1539   /* ??? This only works reliably with the GNU linker.   */
1540   if (priority != DEFAULT_INIT_PRIORITY)
1541     {
1542       sprintf (buf, ".ctors.%.5u",
1543 	       /* Invert the numbering so the linker puts us in the proper
1544 		  order; constructors are run from right to left, and the
1545 		  linker sorts in increasing order.  */
1546 	       MAX_INIT_PRIORITY - priority);
1547       section = buf;
1548     }
1549 
1550   named_section_flags (section, 0);
1551   assemble_align (POINTER_SIZE);
1552   assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1553 }
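
/* For example, a constructor with the default priority goes into the
   plain `.ctors' section, while one registered with priority 101 goes
   into `.ctors.65434' (MAX_INIT_PRIORITY, normally 65535, minus 101),
   so the linker's sort by section name produces the intended order.  */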
1554 
1555 /* Print a memory address as an operand to reference that memory location.  */
1556 void
1557 xstormy16_print_operand_address (FILE *file, rtx address)
1558 {
1559   HOST_WIDE_INT offset;
1560   int pre_dec, post_inc;
1561 
1562   /* There are a few easy cases.  */
1563   if (GET_CODE (address) == CONST_INT)
1564     {
1565       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1566       return;
1567     }
1568 
1569   if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1570     {
1571       output_addr_const (file, address);
1572       return;
1573     }
1574 
1575   /* Otherwise, it's hopefully something of the form
1576      (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
1577   */
1578 
1579   if (GET_CODE (address) == PLUS)
1580     {
1581       if (GET_CODE (XEXP (address, 1)) != CONST_INT)
1582 	abort ();
1583       offset = INTVAL (XEXP (address, 1));
1584       address = XEXP (address, 0);
1585     }
1586   else
1587     offset = 0;
1588 
1589   pre_dec = (GET_CODE (address) == PRE_DEC);
1590   post_inc = (GET_CODE (address) == POST_INC);
1591   if (pre_dec || post_inc)
1592     address = XEXP (address, 0);
1593 
1594   if (GET_CODE (address) != REG)
1595     abort ();
1596 
1597   fputc ('(', file);
1598   if (pre_dec)
1599     fputs ("--", file);
1600   fputs (reg_names [REGNO (address)], file);
1601   if (post_inc)
1602     fputs ("++", file);
1603   if (offset != 0)
1604     fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1605   fputc (')', file);
1606 }
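
/* Examples of the syntax produced above (register names illustrative):
     (reg:HI r5)                            ->  (r5)
     (plus:HI (reg:HI r5) (const_int 12))   ->  (r5,12)
     (pre_dec:HI (reg:HI r2))               ->  (--r2)
     (post_inc:HI (reg:HI r2))              ->  (r2++)
   A CONST_INT is masked to 16 bits, and any other constant is passed to
   output_addr_const.  */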
1607 
1608 /* Print an operand to an assembler instruction.  */
1609 void
1610 xstormy16_print_operand (FILE *file, rtx x, int code)
1611 {
1612   switch (code)
1613     {
1614     case 'B':
1615 	/* There is either one bit set, or one bit clear, in X.
1616 	   Print it preceded by '#'.  */
1617       {
1618 	HOST_WIDE_INT xx = 1;
1619 	HOST_WIDE_INT l;
1620 
1621 	if (GET_CODE (x) == CONST_INT)
1622 	  xx = INTVAL (x);
1623 	else
1624 	  output_operand_lossage ("`B' operand is not constant");
1625 
1626 	l = exact_log2 (xx);
1627 	if (l == -1)
1628 	  l = exact_log2 (~xx);
1629 	if (l == -1)
1630 	  output_operand_lossage ("`B' operand has multiple bits set");
1631 
1632 	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1633 	return;
1634       }
1635 
1636     case 'C':
1637       /* Print the symbol without a surrounding @fptr().  */
1638       if (GET_CODE (x) == SYMBOL_REF)
1639 	assemble_name (file, XSTR (x, 0));
1640       else if (GET_CODE (x) == LABEL_REF)
1641 	output_asm_label (x);
1642       else
1643 	xstormy16_print_operand_address (file, x);
1644       return;
1645 
1646     case 'o':
1647     case 'O':
1648       /* Print the immediate operand less one, preceded by '#'.
1649          For 'O', negate it first.  */
1650       {
1651 	HOST_WIDE_INT xx = 0;
1652 
1653 	if (GET_CODE (x) == CONST_INT)
1654 	  xx = INTVAL (x);
1655 	else
1656 	  output_operand_lossage ("`o' operand is not constant");
1657 
1658 	if (code == 'O')
1659 	  xx = -xx;
1660 
1661 	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1662 	return;
1663       }
1664 
1665     case 0:
1666       /* Handled below.  */
1667       break;
1668 
1669     default:
1670       output_operand_lossage ("xstormy16_print_operand: unknown code");
1671       return;
1672     }
1673 
1674   switch (GET_CODE (x))
1675     {
1676     case REG:
1677       fputs (reg_names [REGNO (x)], file);
1678       break;
1679 
1680     case MEM:
1681       xstormy16_print_operand_address (file, XEXP (x, 0));
1682       break;
1683 
1684     default:
1685       /* Some kind of constant or label; an immediate operand,
1686          so prefix it with '#' for the assembler.  */
1687       fputs (IMMEDIATE_PREFIX, file);
1688       output_addr_const (file, x);
1689       break;
1690     }
1691 
1692   return;
1693 }
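
/* Worked examples of the codes above: with X == (const_int 8), `%B'
   prints `#3' (the index of the single set bit); with X == (const_int 5),
   `%o' prints `#4' and `%O' prints `#-6'; a plain `%' prints a register
   name, a memory address, or a `#'-prefixed constant.  */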
1694 
1695 
1696 /* Expander for the `casesi' pattern.
1697    INDEX is the index of the switch statement.
1698    LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1699      to the first table entry.
1700    RANGE is the number of table entries.
1701    TABLE is an ADDR_VEC that is the jump table.
1702    DEFAULT_LABEL is the address to branch to if INDEX is outside the
1703      range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1704 */
1705 
1706 void
1707 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1708 			 rtx table, rtx default_label)
1709 {
1710   HOST_WIDE_INT range_i = INTVAL (range);
1711   rtx int_index;
1712 
1713   /* This code uses 'br', so it can deal only with tables of size up to
1714      8192 entries.  */
1715   if (range_i >= 8192)
1716     sorry ("switch statement of size %lu entries too large",
1717 	   (unsigned long) range_i);
1718 
1719   index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1720 			OPTAB_LIB_WIDEN);
1721   emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1722 			   default_label);
1723   int_index = gen_lowpart_common (HImode, index);
1724   emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
1725   emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1726 }
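
/* Sketch of the expansion above: an SImode subtraction of LOWER_BOUND
   from INDEX, an unsigned compare-and-branch to DEFAULT_LABEL for
   out-of-range values, a shift of the low word left by 2 (each table
   entry below is a four-byte `jmpf'), and finally the pc-relative
   tablejump into the table emitted by the routine that follows.  */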
1727 
1728 /* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
1729    instructions, without label or alignment or any other special
1730    constructs.  We know that the previous instruction will be the
1731    `tablejump_pcrel' output above.
1732 
1733    TODO: it might be nice to output 'br' instructions if they could
1734    all reach.  */
1735 
1736 void
1737 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1738 {
1739   int vlen, idx;
1740 
1741   function_section (current_function_decl);
1742 
1743   vlen = XVECLEN (table, 0);
1744   for (idx = 0; idx < vlen; idx++)
1745     {
1746       fputs ("\tjmpf ", file);
1747       output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1748       fputc ('\n', file);
1749     }
1750 }
1751 
1752 
1753 /* Expander for the `call' patterns.
1754    RETVAL is the RTL for the return register or NULL for void functions.
1755    DEST is the function to call, typically a MEM.
1756    COUNTER is ignored.  */
1762 
1763 void
1764 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1765 {
1766   rtx call, temp;
1767   enum machine_mode mode;
1768 
1769   if (GET_CODE (dest) != MEM)
1770     abort ();
1771   dest = XEXP (dest, 0);
1772 
1773   if (! CONSTANT_P (dest)
1774       && GET_CODE (dest) != REG)
1775     dest = force_reg (Pmode, dest);
1776 
1777   if (retval == NULL)
1778     mode = VOIDmode;
1779   else
1780     mode = GET_MODE (retval);
1781 
1782   call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1783 		       counter);
1784   if (retval)
1785     call = gen_rtx_SET (VOIDmode, retval, call);
1786 
1787   if (! CONSTANT_P (dest))
1788     {
1789       temp = gen_reg_rtx (HImode);
1790       emit_move_insn (temp, const0_rtx);
1791     }
1792   else
1793     temp = const0_rtx;
1794 
1795   call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1796 						gen_rtx_USE (VOIDmode, temp)));
1797   emit_call_insn (call);
1798 }
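
/* The RTL emitted above for a call to a known function `f' that returns
   a value is roughly

     (parallel [(set RETVAL (call (mem (symbol_ref "f")) COUNTER))
                (use (const_int 0))])

   while an indirect call wraps the USE around a fresh pseudo that has
   been cleared to zero, which the call patterns in the machine
   description then match.  */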
1799 
1800 /* Expanders for multiword computational operations.  */
1801 
1802 /* Expander for arithmetic operations; emit insns to compute
1803 
1804    (set DEST (CODE:MODE SRC0 SRC1))
1805 
1806    using CARRY as a temporary.  When CODE is COMPARE, a branch
1807    template is generated (this saves duplicating code in
1808    xstormy16_split_cbranch).  */
1809 
1810 void
1811 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1812 			rtx dest, rtx src0, rtx src1, rtx carry)
1813 {
1814   int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1815   int i;
1816   int firstloop = 1;
1817 
1818   if (code == NEG)
1819     emit_move_insn (src0, const0_rtx);
1820 
1821   for (i = 0; i < num_words; i++)
1822     {
1823       rtx w_src0, w_src1, w_dest;
1824       rtx insn;
1825 
1826       w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1827 				    i * UNITS_PER_WORD);
1828       w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1829       w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1830 
1831       switch (code)
1832 	{
1833 	case PLUS:
1834 	  if (firstloop
1835 	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1836 	    continue;
1837 
1838 	  if (firstloop)
1839 	    insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
1840 	  else
1841 	    insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
1842 	  break;
1843 
1844 	case NEG:
1845 	case MINUS:
1846 	case COMPARE:
1847 	  if (code == COMPARE && i == num_words - 1)
1848 	    {
1849 	      rtx branch, sub, clobber, sub_1;
1850 
1851 	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
1852 				     gen_rtx_ZERO_EXTEND (HImode, carry));
1853 	      sub = gen_rtx_SET (VOIDmode, w_dest,
1854 				 gen_rtx_MINUS (HImode, sub_1, w_src1));
1855 	      clobber = gen_rtx_CLOBBER (VOIDmode, carry);
1856 	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
1857 				    gen_rtx_IF_THEN_ELSE (VOIDmode,
1858 							  gen_rtx_EQ (HImode,
1859 								      sub_1,
1860 								      w_src1),
1861 							  pc_rtx,
1862 							  pc_rtx));
1863 	      insn = gen_rtx_PARALLEL (VOIDmode,
1864 				       gen_rtvec (3, branch, sub, clobber));
1865 	    }
1866 	  else if (firstloop
1867 		   && code != COMPARE
1868 		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
1869 	    continue;
1870 	  else if (firstloop)
1871 	    insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
1872 	  else
1873 	    insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
1874 	  break;
1875 
1876 	case IOR:
1877 	case XOR:
1878 	case AND:
1879 	  if (GET_CODE (w_src1) == CONST_INT
1880 	      && INTVAL (w_src1) == -(code == AND))
1881 	    continue;
1882 
1883 	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode,
1884 							 w_src0, w_src1));
1885 	  break;
1886 
1887 	case NOT:
1888 	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1889 	  break;
1890 
1891 	default:
1892 	  abort ();
1893 	}
1894 
1895       firstloop = 0;
1896       emit (insn);
1897     }
1898 
1899   /* If we emit nothing, try_split() will think we failed.  So emit
1900      something that does nothing and can be optimized away.  */
1901   if (firstloop)
1902     emit (gen_nop ());
1903 }
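
/* Worked example: for (set DEST (plus:SI SRC0 SRC1)) the loop above
   emits an `addchi4' for the low HImode word (defining CARRY) and an
   `addchi5' for the high word (using and redefining CARRY).  If the low
   word of SRC1 is the constant 0 it is skipped and the high word is
   added with the carry-free `addchi4' instead; if nothing at all is
   emitted, the trailing `nop' keeps try_split happy.  */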
1904 
1905 /* Return 1 if OP is a shift operator.  */
1906 
1907 int
1908 shift_operator (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1909 {
1910   enum rtx_code code = GET_CODE (op);
1911 
1912   return (code == ASHIFT
1913 	  || code == ASHIFTRT
1914 	  || code == LSHIFTRT);
1915 }
1916 
1917 /* The shift operations are split at output time for constant values;
1918    variable-width shifts get handed off to a library routine.
1919 
1920    Generate an output string to do (set X (CODE:MODE X SIZE_R))
1921    SIZE_R will be a CONST_INT, X will be a hard register.  */
1922 
1923 const char *
1924 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
1925 			rtx x, rtx size_r, rtx temp)
1926 {
1927   HOST_WIDE_INT size;
1928   const char *r0, *r1, *rt;
1929   static char r[64];
1930 
1931   if (GET_CODE (size_r) != CONST_INT
1932       || GET_CODE (x) != REG
1933       || mode != SImode)
1934     abort ();
1935   size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
1936 
1937   if (size == 0)
1938     return "";
1939 
1940   r0 = reg_names [REGNO (x)];
1941   r1 = reg_names [REGNO (x) + 1];
1942 
1943   /* For shifts of size 1, we can use the rotate instructions.  */
1944   if (size == 1)
1945     {
1946       switch (code)
1947 	{
1948 	case ASHIFT:
1949 	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
1950 	  break;
1951 	case ASHIFTRT:
1952 	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
1953 	  break;
1954 	case LSHIFTRT:
1955 	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
1956 	  break;
1957 	default:
1958 	  abort ();
1959 	}
1960       return r;
1961     }
1962 
1963   /* For large shifts, there are easy special cases.  */
1964   if (size == 16)
1965     {
1966       switch (code)
1967 	{
1968 	case ASHIFT:
1969 	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
1970 	  break;
1971 	case ASHIFTRT:
1972 	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
1973 	  break;
1974 	case LSHIFTRT:
1975 	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
1976 	  break;
1977 	default:
1978 	  abort ();
1979 	}
1980       return r;
1981     }
1982   if (size > 16)
1983     {
1984       switch (code)
1985 	{
1986 	case ASHIFT:
1987 	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
1988 		   r1, r0, r0, r1, (int) size - 16);
1989 	  break;
1990 	case ASHIFTRT:
1991 	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
1992 		   r0, r1, r1, r0, (int) size - 16);
1993 	  break;
1994 	case LSHIFTRT:
1995 	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
1996 		   r0, r1, r1, r0, (int) size - 16);
1997 	  break;
1998 	default:
1999 	  abort ();
2000 	}
2001       return r;
2002     }
2003 
2004   /* For the rest, we have to do more work.  In particular, we
2005      need a temporary.  */
2006   rt = reg_names [REGNO (temp)];
2007   switch (code)
2008     {
2009     case ASHIFT:
2010       sprintf (r,
2011 	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2012 	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
2013 	       r1, rt);
2014       break;
2015     case ASHIFTRT:
2016       sprintf (r,
2017 	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2018 	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2019 	       r0, rt);
2020       break;
2021     case LSHIFTRT:
2022       sprintf (r,
2023 	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2024 	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
2025 	       r0, rt);
2026       break;
2027     default:
2028       abort ();
2029     }
2030   return r;
2031 }
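
/* Worked example: an SImode ASHIFT by 4 of a value held in r0/r1, with
   r7 as the temporary, comes out of the final case above as

	mov r7,r0 | shl r0,#4 | shl r1,#4 | shr r7,#12 | or r1,r7

   i.e. the bits shifted out of the low word are recovered from the saved
   copy and merged into the high word.  (Register names illustrative.)  */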
2032 
2033 /* Attribute handling.  */
2034 
2035 /* Return nonzero if the function is an interrupt function.  */
2036 int
2037 xstormy16_interrupt_function_p (void)
2038 {
2039   tree attributes;
2040 
2041   /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2042      any functions are declared, which is demonstrably wrong, but
2043      it is worked around here.  FIXME.  */
2044   if (!cfun)
2045     return 0;
2046 
2047   attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2048   return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2049 }
2050 
2051 #undef TARGET_ATTRIBUTE_TABLE
2052 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2053 static tree xstormy16_handle_interrupt_attribute
2054   (tree *, tree, tree, int, bool *);
2055 
2056 static const struct attribute_spec xstormy16_attribute_table[] =
2057 {
2058   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2059   { "interrupt", 0, 0, false, true,  true,  xstormy16_handle_interrupt_attribute },
2060   { NULL,        0, 0, false, false, false, NULL }
2061 };
2062 
2063 /* Handle an "interrupt" attribute;
2064    arguments as in struct attribute_spec.handler.  */
2065 static tree
2066 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2067 				      tree args ATTRIBUTE_UNUSED,
2068 				      int flags ATTRIBUTE_UNUSED,
2069 				      bool *no_add_attrs)
2070 {
2071   if (TREE_CODE (*node) != FUNCTION_TYPE)
2072     {
2073       warning ("`%s' attribute only applies to functions",
2074 	       IDENTIFIER_POINTER (name));
2075       *no_add_attrs = true;
2076     }
2077 
2078   return NULL_TREE;
2079 }
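
/* Usage sketch (hypothetical user code, not part of this file):

       void timer_isr (void) __attribute__ ((interrupt));

   After this, xstormy16_interrupt_function_p returns nonzero inside
   `timer_isr', and the prologue/epilogue code elsewhere in this file can
   emit the interrupt-specific entry and return sequences.  */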
2080 
2081 #undef TARGET_INIT_BUILTINS
2082 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2083 #undef TARGET_EXPAND_BUILTIN
2084 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2085 
2086 static struct {
2087   const char *name;
2088   int md_code;
2089   const char *arg_ops; /* 0..9, t for temp register, r for return value */
2090   const char *arg_types; /* s=short,l=long, upper case for unsigned */
2091 } s16builtins[] = {
2092   { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2093   { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2094   { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2095   { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2096   { 0, 0, 0, 0 }
2097 };
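
/* Reading the table: for "__sdivlh" the arg_ops string "rt01" says that
   insn operand 0 is the return value, operand 1 is a scratch register,
   and operands 2 and 3 come from the first and second arguments, while
   the arg_types string "sls" gives the prototype

       short __sdivlh (long, short);

   "__smodlh" reuses the same division insn with "tr01", so the value
   returned to the caller is the insn's operand 1 (presumably the
   remainder).  */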
2098 
2099 static void
2100 xstormy16_init_builtins (void)
2101 {
2102   tree args, ret_type, arg;
2103   int i, a;
2104 
2105   ret_type = void_type_node;
2106 
2107   for (i=0; s16builtins[i].name; i++)
2108     {
2109       args = void_list_node;
2110       for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2111 	{
2112 	  switch (s16builtins[i].arg_types[a])
2113 	    {
2114 	    case 's': arg = short_integer_type_node; break;
2115 	    case 'S': arg = short_unsigned_type_node; break;
2116 	    case 'l': arg = long_integer_type_node; break;
2117 	    case 'L': arg = long_unsigned_type_node; break;
2118 	    default: abort();
2119 	    }
2120 	  if (a == 0)
2121 	    ret_type = arg;
2122 	  else
2123 	    args = tree_cons (NULL_TREE, arg, args);
2124 	}
2125       builtin_function (s16builtins[i].name,
2126 			build_function_type (ret_type, args),
2127 			i, BUILT_IN_MD, NULL, NULL);
2128     }
2129 }
2130 
2131 static rtx
2132 xstormy16_expand_builtin (tree exp, rtx target,
2133 			 rtx subtarget ATTRIBUTE_UNUSED,
2134 			 enum machine_mode mode ATTRIBUTE_UNUSED,
2135 			 int ignore ATTRIBUTE_UNUSED)
2136 {
2137   rtx op[10], args[10], pat, copyto[10], retval = 0;
2138   tree fndecl, argtree;
2139   int i, a, o, code;
2140 
2141   fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2142   argtree = TREE_OPERAND (exp, 1);
2143   i = DECL_FUNCTION_CODE (fndecl);
2144   code = s16builtins[i].md_code;
2145 
2146   for (a = 0; a < 10 && argtree; a++)
2147     {
2148       args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2149       argtree = TREE_CHAIN (argtree);
2150     }
2151 
2152   for (o = 0; s16builtins[i].arg_ops[o]; o++)
2153     {
2154       char ao = s16builtins[i].arg_ops[o];
2155       char c = insn_data[code].operand[o].constraint[0];
2156       int omode;
2157 
2158       copyto[o] = 0;
2159 
2160       omode = insn_data[code].operand[o].mode;
2161       if (ao == 'r')
2162 	op[o] = target ? target : gen_reg_rtx (omode);
2163       else if (ao == 't')
2164 	op[o] = gen_reg_rtx (omode);
2165       else
2166 	op[o] = args[(int) hex_value (ao)];
2167 
2168       if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2169 	{
2170 	  if (c == '+' || c == '=')
2171 	    {
2172 	      copyto[o] = op[o];
2173 	      op[o] = gen_reg_rtx (omode);
2174 	    }
2175 	  else
2176 	    op[o] = copy_to_mode_reg (omode, op[o]);
2177 	}
2178 
2179       if (ao == 'r')
2180 	retval = op[o];
2181     }
2182 
2183   pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2184 			op[5], op[6], op[7], op[8], op[9]);
2185   emit_insn (pat);
2186 
2187   for (o = 0; s16builtins[i].arg_ops[o]; o++)
2188     if (copyto[o])
2189       {
2190 	emit_move_insn (copyto[o], op[o]);
2191 	if (op[o] == retval)
2192 	  retval = copyto[o];
2193       }
2194 
2195   return retval;
2196 }
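
/* Usage sketch (hypothetical user code):

       unsigned long x;
       unsigned short y, r;
       r = __umodlh (x, y);

   The call is expanded here by loading the arguments into the operands
   named by the arg_ops string, emitting the single `udivlh' pattern, and
   returning whichever RTX was chosen for the `r' slot.  */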
2197 
2198 
2199 #undef TARGET_ASM_ALIGNED_HI_OP
2200 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2201 #undef TARGET_ASM_ALIGNED_SI_OP
2202 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2203 
2204 #undef TARGET_ASM_OUTPUT_MI_THUNK
2205 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2206 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2207 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2208 
2209 #undef TARGET_RTX_COSTS
2210 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2211 #undef TARGET_ADDRESS_COST
2212 #define TARGET_ADDRESS_COST xstormy16_address_cost
2213 
2214 #undef TARGET_BUILD_BUILTIN_VA_LIST_TYPE
2215 #define TARGET_BUILD_BUILTIN_VA_LIST_TYPE xstormy16_build_builtin_va_list
2216 
2217 struct gcc_target targetm = TARGET_INITIALIZER;
2218