1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2013 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "tm_p.h"
45 #include "langhooks.h"
46 #include "gimple.h"
47 #include "df.h"
48 #include "reload.h"
49 #include "ggc.h"
50
/* Forward declarations of static helpers defined later in this file.  */
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
static int xstormy16_address_cost (rtx, enum machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

/* Section for "below-100" data; GTY-marked so the GC keeps it alive
   across passes.  Presumably initialized during target asm init —
   the initialization is outside this chunk.  */
static GTY(()) section *bss100_section;
64
65 /* Compute a (partial) cost for rtx X. Return true if the complete
66 cost has been computed, and false if subexpressions should be
67 scanned. In either case, *TOTAL contains the cost result. */
68
69 static bool
xstormy16_rtx_costs(rtx x,int code,int outer_code ATTRIBUTE_UNUSED,int opno ATTRIBUTE_UNUSED,int * total,bool speed ATTRIBUTE_UNUSED)70 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
71 int opno ATTRIBUTE_UNUSED, int *total,
72 bool speed ATTRIBUTE_UNUSED)
73 {
74 switch (code)
75 {
76 case CONST_INT:
77 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
78 *total = COSTS_N_INSNS (1) / 2;
79 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1);
81 else
82 *total = COSTS_N_INSNS (2);
83 return true;
84
85 case CONST_DOUBLE:
86 case CONST:
87 case SYMBOL_REF:
88 case LABEL_REF:
89 *total = COSTS_N_INSNS (2);
90 return true;
91
92 case MULT:
93 *total = COSTS_N_INSNS (35 + 6);
94 return true;
95 case DIV:
96 *total = COSTS_N_INSNS (51 - 6);
97 return true;
98
99 default:
100 return false;
101 }
102 }
103
104 static int
xstormy16_address_cost(rtx x,enum machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed ATTRIBUTE_UNUSED)105 xstormy16_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
106 addr_space_t as ATTRIBUTE_UNUSED,
107 bool speed ATTRIBUTE_UNUSED)
108 {
109 return (CONST_INT_P (x) ? 2
110 : GET_CODE (x) == PLUS ? 7
111 : 5);
112 }
113
114 /* Worker function for TARGET_MEMORY_MOVE_COST. */
115
116 static int
xstormy16_memory_move_cost(enum machine_mode mode,reg_class_t rclass,bool in)117 xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
118 bool in)
119 {
120 return (5 + memory_move_secondary_cost (mode, rclass, in));
121 }
122
123 /* Branches are handled as follows:
124
125 1. HImode compare-and-branches. The machine supports these
126 natively, so the appropriate pattern is emitted directly.
127
128 2. SImode EQ and NE. These are emitted as pairs of HImode
129 compare-and-branches.
130
131 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
132 of a SImode subtract followed by a branch (not a compare-and-branch),
133 like this:
134 sub
135 sbc
136 blt
137
138 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
139 sub
140 sbc
141 blt
142 or
143 bne. */
144
/* Emit a branch of kind CODE to location LOC.  OP0 and OP1 are the
   values to compare; they must both be HImode or both SImode.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 of the comment above: SImode GT/LE/GTU/LEU is built from
     two recursive branches, LT/LTU followed by NE/EQ.  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      /* For GT/GTU the first branch skips over the second, so it
	 needs a local label.  */
      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  /* Case 2: SImode EQ/NE against a nonzero value is split into
     word-sized compare-and-branches.  */
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      /* For every word but the last: a mismatch sends NE straight to
	 LOC, or EQ past the final test via LAB.  */
      for (i = 0; i < num_words - 1; i++)
	{
	  op0_word = simplify_gen_subreg (word_mode, op0, mode,
					  i * UNITS_PER_WORD);
	  op1_word = simplify_gen_subreg (word_mode, op1, mode,
					  i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
	}
      /* The last word is tested with the original condition.  */
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
				      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
				      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* Every form of the branch clobbers the carry bit.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    /* The SImode EQ/NE pattern also clobbers the copied operand.  */
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
240
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
			 rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Build the arithmetic part in a detached sequence so we can edit
     its final insn before emitting.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Walk to the last insn of the sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite, in place, the first SET of the last insn: give its
     condition the comparison's code and point it at LABEL.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
270
271
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten by
   the next call.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* The insn's length attribute tells us whether the short branch
     form reaches the target.  */
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  /* A long branch tests the opposite condition to skip over a jmpf,
     so the sense flips once more.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  /* Unconditional branch: plain br, or jmpf for a long branch.  */
  if (! op)
    {
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* If the first operand is not a register, swap the operands and the
     condition so the register comes first.  */
  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GT: ccode = "gt"; break;
    case LE: ccode = "le"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;
    case GTU: ccode = "hi"; break;
    case LEU: ccode = "ls"; break;

    default:
      gcc_unreachable ();
    }

  /* Long form: reversed test hops over an unconditional jmpf.  */
  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
343
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten by
   the next call.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  /* As in the HImode case, a long branch inverts the test to skip
     over a jmpf.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;

    /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  /* Choose the instruction that finishes the SImode comparison before
     the branch.  */
  switch (code)
    {
    case EQ: case NE:
      {
	int regnum;

	gcc_assert (REG_P (XEXP (op, 0)));

	/* OR the two halves together so the zero flag reflects the
	   whole SImode value.  */
	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Subtract-with-carry of the high words completes the SImode
	 subtraction started by the preceding insns.  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
414
415 /* Many machines have some registers that cannot be copied directly to or from
416 memory or even from other types of registers. An example is the `MQ'
417 register, which on most machines, can only be copied to or from general
418 registers, but not memory. Some machines allow copying all registers to and
419 from memory, but require a scratch register for stores to some memory
420 locations (e.g., those with symbolic address on the RT, and those with
421 certain symbolic address on the SPARC when compiling PIC). In some cases,
422 both an intermediate and a scratch register are required.
423
424 You should define these macros to indicate to the reload phase that it may
425 need to allocate at least one register for a reload in addition to the
426 register to contain the data. Specifically, if copying X to a register
427 RCLASS in MODE requires an intermediate register, you should define
428 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
429 whose registers can be used as intermediate registers or scratch registers.
430
431 If copying a register RCLASS in MODE to X requires an intermediate or scratch
432 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
433 largest register class required. If the requirements for input and output
434 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
435 instead of defining both macros identically.
436
437 The values returned by these macros are often `GENERAL_REGS'. Return
438 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
439 to or from a register of RCLASS in MODE without requiring a scratch register.
440 Do not define this macro if it would always return `NO_REGS'.
441
442 If a scratch register is required (either with or without an intermediate
443 register), you should define patterns for `reload_inM' or `reload_outM', as
444 required.. These patterns, which will normally be implemented with a
445 `define_expand', should be similar to the `movM' patterns, except that
446 operand 2 is the scratch register.
447
448 Define constraints for the reload register and scratch register that contain
449 a single register class. If the original reload register (whose class is
450 RCLASS) can meet the constraint given in the pattern, the value returned by
451 these macros is used for the class of the scratch register. Otherwise, two
452 additional reload registers are required. Their classes are obtained from
453 the constraints in the insn pattern.
454
455 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
456 either be in a hard register or in memory. Use `true_regnum' to find out;
457 it will return -1 if the pseudo is in memory and the hard register number if
458 it is in a register.
459
460 These macros should not be used in the case where a particular class of
461 registers can only be copied to memory and not to another class of
462 registers. In that case, secondary reload registers are not needed and
463 would not be helpful. Instead, a stack location must be used to perform the
464 copy and the `movM' pattern should use memory as an intermediate storage.
465 This case often occurs between floating-point and general registers. */
466
467 enum reg_class
xstormy16_secondary_reload_class(enum reg_class rclass,enum machine_mode mode ATTRIBUTE_UNUSED,rtx x)468 xstormy16_secondary_reload_class (enum reg_class rclass,
469 enum machine_mode mode ATTRIBUTE_UNUSED,
470 rtx x)
471 {
472 /* This chip has the interesting property that only the first eight
473 registers can be moved to/from memory. */
474 if ((MEM_P (x)
475 || ((GET_CODE (x) == SUBREG || REG_P (x))
476 && (true_regnum (x) == -1
477 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
478 && ! reg_class_subset_p (rclass, EIGHT_REGS))
479 return EIGHT_REGS;
480
481 return NO_REGS;
482 }
483
484 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
485 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
486
487 static reg_class_t
xstormy16_preferred_reload_class(rtx x,reg_class_t rclass)488 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
489 {
490 if (rclass == GENERAL_REGS && MEM_P (x))
491 return EIGHT_REGS;
492
493 return rclass;
494 }
495
496 /* Predicate for symbols and addresses that reflect special 8-bit
497 addressing. */
498
499 int
xstormy16_below100_symbol(rtx x,enum machine_mode mode ATTRIBUTE_UNUSED)500 xstormy16_below100_symbol (rtx x,
501 enum machine_mode mode ATTRIBUTE_UNUSED)
502 {
503 if (GET_CODE (x) == CONST)
504 x = XEXP (x, 0);
505 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
506 x = XEXP (x, 0);
507
508 if (GET_CODE (x) == SYMBOL_REF)
509 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
510
511 if (CONST_INT_P (x))
512 {
513 HOST_WIDE_INT i = INTVAL (x);
514
515 if ((i >= 0x0000 && i <= 0x00ff)
516 || (i >= 0x7f00 && i <= 0x7fff))
517 return 1;
518 }
519 return 0;
520 }
521
522 /* Likewise, but only for non-volatile MEMs, for patterns where the
523 MEM will get split into smaller sized accesses. */
524
525 int
xstormy16_splittable_below100_operand(rtx x,enum machine_mode mode)526 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
527 {
528 if (MEM_P (x) && MEM_VOLATILE_P (x))
529 return 0;
530 return xstormy16_below100_operand (x, mode);
531 }
532
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Setting a single bit can be done directly by iorqi3_internal,
     provided both operands are below-100 MEMs or registers.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers and perform the IOR
     in HImode through paradoxical subregs.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  /* Copy the result back if we had to substitute a fresh register
     for the destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
577
/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  Mirrors
   xstormy16_expand_iorqi3 with clear-bit instead of set-bit.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Clearing a single bit can be done directly by andqi3_internal,
     provided both operands are below-100 MEMs or registers.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force operands into registers and perform the AND
     in HImode through paradoxical subregs.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  /* Copy the result back if we had to substitute a fresh register
     for the destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
622
/* True if X is a CONST_INT and X + OFFSET lies in [-2048, 2047],
   i.e. fits the displacement range of register-indexed addressing.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
 (CONST_INT_P (X)						\
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X is a CONST_INT and X + OFFSET is a valid absolute
   address: non-negative, below 0x8000, and either in the low 0x100
   bytes or in the 0x7F00..0x7FFF range.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)		 \
 (CONST_INT_P (X)						 \
  && INTVAL (X) + (OFFSET) >= 0					 \
  && INTVAL (X) + (OFFSET) < 0x8000				 \
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
632
/* Worker for TARGET_LEGITIMATE_ADDRESS_P.  Accepts absolute constant
   addresses, register (+offset) addresses, auto-modify forms, and
   below-100 symbols.  STRICT requires hard-register bases.  */

bool
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, bool strict)
{
  /* Valid absolute constant address.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  /* Base + small constant offset: strip the offset and validate the
     base below.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
	return false;
    }

  /* Look through auto-modify addressing to the base register.  */
  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  /* A base register; under STRICT it must be a hard register.  */
  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}
664
665 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
666
667 On this chip, this is true if the address is valid with an offset
668 of 0 but not of 6, because in that case it cannot be used as an
669 address for DImode or DFmode, or if the address is a post-increment
670 or pre-decrement address. */
671
672 static bool
xstormy16_mode_dependent_address_p(const_rtx x,addr_space_t as ATTRIBUTE_UNUSED)673 xstormy16_mode_dependent_address_p (const_rtx x,
674 addr_space_t as ATTRIBUTE_UNUSED)
675 {
676 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
677 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
678 return true;
679
680 if (GET_CODE (x) == PLUS
681 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
682 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
683 return true;
684
685 /* Auto-increment addresses are now treated generically in recog.c. */
686 return false;
687 }
688
689 int
short_memory_operand(rtx x,enum machine_mode mode)690 short_memory_operand (rtx x, enum machine_mode mode)
691 {
692 if (! memory_operand (x, mode))
693 return 0;
694 return (GET_CODE (XEXP (x, 0)) != PLUS);
695 }
696
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST, one word (HImode) at a time.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  /* Work on an unflagged copy; the volatile bit is reapplied
	     to each word-sized MEM emitted below.  */
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  /* Likewise for a volatile source.  */
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register overlap: copy high-to-low when the
	 destination starts above the source.  */
      if (REG_P (src)
	  && REG_P (dest)
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addressing walks memory downwards.  */
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      /* The load clobbers its own address register: pick the
	 direction that uses the address before overwriting it.  */
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  /* Emit one word move per iteration, in the chosen direction.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      /* Record the auto-increment so later passes know the base
	 register changes.  */
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
831
832 /* Expander for the 'move' patterns. Emit insns to copy a value of
833 mode MODE from SRC to DEST. */
834
835 void
xstormy16_expand_move(enum machine_mode mode,rtx dest,rtx src)836 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
837 {
838 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
839 {
840 rtx pmv = XEXP (dest, 0);
841 rtx dest_reg = XEXP (pmv, 0);
842 rtx dest_mod = XEXP (pmv, 1);
843 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
844 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
845
846 dest = gen_rtx_MEM (mode, dest_reg);
847 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
848 }
849 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
850 {
851 rtx pmv = XEXP (src, 0);
852 rtx src_reg = XEXP (pmv, 0);
853 rtx src_mod = XEXP (pmv, 1);
854 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
855 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
856
857 src = gen_rtx_MEM (mode, src_reg);
858 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
859 }
860
861 /* There are only limited immediate-to-memory move instructions. */
862 if (! reload_in_progress
863 && ! reload_completed
864 && MEM_P (dest)
865 && (! CONST_INT_P (XEXP (dest, 0))
866 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
867 && ! xstormy16_below100_operand (dest, mode)
868 && ! REG_P (src)
869 && GET_CODE (src) != SUBREG)
870 src = copy_to_mode_reg (mode, src);
871
872 /* Don't emit something we would immediately split. */
873 if (reload_completed
874 && mode != HImode && mode != QImode)
875 {
876 xstormy16_split_move (mode, dest, src);
877 return;
878 }
879
880 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
881 }
882
883 /* Stack Layout:
884
885 The stack is laid out as follows:
886
887 SP->
888 FP-> Local variables
889 Register save area (up to 4 words)
890 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
891
892 AP-> Return address (two words)
893 9th procedure parameter word
894 10th procedure parameter word
895 ...
896 last procedure parameter word
897
898 The frame pointer location is tuned to make it most likely that all
899 parameters and local variables can be accessed using a load-indexed
900 instruction. */
901
/* A structure to describe the layout.  All sizes and offsets are in
   bytes.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;	/* First local variable relative to AP.  */
  int sp_minus_fp;		/* Distance from FP up to SP.  */
  int fp_minus_ap;		/* Distance from AP up to FP.  */
};

/* Does REGNO need to be saved?  IFUN is nonzero for an interrupt
   function, in which case call-used registers (except the carry bit)
   are also saved — unless the function is a leaf that never uses
   them.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
923
/* Compute the stack layout (see the "Stack Layout" comment above) and
   return it by value.  Recomputed from scratch on every call.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word for each register that needs saving.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* stdarg functions also dump every argument register.  */
  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place FP so that frame slots and incoming arguments stay within a
     2048-byte displacement of it when possible.  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  - INCOMING_FRAME_SP_OFFSET);
  /* Derive the remaining offsets from the choices above.  */
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
966
967 /* Worker function for TARGET_CAN_ELIMINATE. */
968
969 static bool
xstormy16_can_eliminate(const int from,const int to)970 xstormy16_can_eliminate (const int from, const int to)
971 {
972 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
973 ? ! frame_pointer_needed
974 : true);
975 }
976
977 /* Determine how all the special registers get eliminated. */
978
979 int
xstormy16_initial_elimination_offset(int from,int to)980 xstormy16_initial_elimination_offset (int from, int to)
981 {
982 struct xstormy16_stack_layout layout;
983 int result;
984
985 layout = xstormy16_compute_stack_layout ();
986
987 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
988 result = layout.sp_minus_fp - layout.locals_size;
989 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
990 result = - layout.locals_size;
991 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
992 result = - layout.fp_minus_ap;
993 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
994 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
995 else
996 gcc_unreachable ();
997
998 return result;
999 }
1000
1001 static rtx
emit_addhi3_postreload(rtx dest,rtx src0,rtx src1)1002 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1003 {
1004 rtx set, clobber, insn;
1005
1006 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1007 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1008 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1009 return insn;
1010 }
1011
1012 /* Called after register allocation to add any instructions needed for
1013 the prologue. Using a prologue insn is favored compared to putting
1014 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1015 since it allows the scheduler to intermix instructions with the
1016 saves of the caller saved registers. In some cases, it might be
1017 necessary to emit a barrier instruction as the last insn to prevent
1018 such scheduling.
1019
1020 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1021 so that the debug info generation code can handle them properly. */
1022
1023 void
xstormy16_expand_prologue(void)1024 xstormy16_expand_prologue (void)
1025 {
1026 struct xstormy16_stack_layout layout;
1027 int regno;
1028 rtx insn;
1029 rtx mem_push_rtx;
1030 const int ifun = xstormy16_interrupt_function_p ();
1031
1032 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1033 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1034
1035 layout = xstormy16_compute_stack_layout ();
1036
1037 if (layout.locals_size >= 32768)
1038 error ("local variable memory requirements exceed capacity");
1039
1040 if (flag_stack_usage_info)
1041 current_function_static_stack_size = layout.frame_size;
1042
1043 /* Save the argument registers if necessary. */
1044 if (layout.stdarg_save_size)
1045 for (regno = FIRST_ARGUMENT_REGISTER;
1046 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1047 regno++)
1048 {
1049 rtx dwarf;
1050 rtx reg = gen_rtx_REG (HImode, regno);
1051
1052 insn = emit_move_insn (mem_push_rtx, reg);
1053 RTX_FRAME_RELATED_P (insn) = 1;
1054
1055 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1056
1057 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1058 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1059 reg);
1060 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1061 plus_constant (Pmode,
1062 stack_pointer_rtx,
1063 GET_MODE_SIZE (Pmode)));
1064 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1065 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1066 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1067 }
1068
1069 /* Push each of the registers to save. */
1070 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1071 if (REG_NEEDS_SAVE (regno, ifun))
1072 {
1073 rtx dwarf;
1074 rtx reg = gen_rtx_REG (HImode, regno);
1075
1076 insn = emit_move_insn (mem_push_rtx, reg);
1077 RTX_FRAME_RELATED_P (insn) = 1;
1078
1079 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1080
1081 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1082 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1083 reg);
1084 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1085 plus_constant (Pmode, \
1086 stack_pointer_rtx,
1087 GET_MODE_SIZE (Pmode)));
1088 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1089 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1090 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1091 }
1092
1093 /* It's just possible that the SP here might be what we need for
1094 the new FP... */
1095 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1096 {
1097 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1098 RTX_FRAME_RELATED_P (insn) = 1;
1099 }
1100
1101 /* Allocate space for local variables. */
1102 if (layout.locals_size)
1103 {
1104 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1105 GEN_INT (layout.locals_size));
1106 RTX_FRAME_RELATED_P (insn) = 1;
1107 }
1108
1109 /* Set up the frame pointer, if required. */
1110 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1111 {
1112 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1113 RTX_FRAME_RELATED_P (insn) = 1;
1114
1115 if (layout.sp_minus_fp)
1116 {
1117 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1118 hard_frame_pointer_rtx,
1119 GEN_INT (- layout.sp_minus_fp));
1120 RTX_FRAME_RELATED_P (insn) = 1;
1121 }
1122 }
1123 }
1124
1125 /* Do we need an epilogue at all? */
1126
1127 int
direct_return(void)1128 direct_return (void)
1129 {
1130 return (reload_completed
1131 && xstormy16_compute_stack_layout ().frame_size == 0
1132 && ! xstormy16_interrupt_function_p ());
1133 }
1134
1135 /* Called after register allocation to add any instructions needed for
1136 the epilogue. Using an epilogue insn is favored compared to putting
1137 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1138 since it allows the scheduler to intermix instructions with the
1139 saves of the caller saved registers. In some cases, it might be
1140 necessary to emit a barrier instruction as the last insn to prevent
1141 such scheduling. */
1142
1143 void
xstormy16_expand_epilogue(void)1144 xstormy16_expand_epilogue (void)
1145 {
1146 struct xstormy16_stack_layout layout;
1147 rtx mem_pop_rtx;
1148 int regno;
1149 const int ifun = xstormy16_interrupt_function_p ();
1150
1151 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1152 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1153
1154 layout = xstormy16_compute_stack_layout ();
1155
1156 /* Pop the stack for the locals. */
1157 if (layout.locals_size)
1158 {
1159 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1160 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1161 else
1162 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1163 GEN_INT (- layout.locals_size));
1164 }
1165
1166 /* Restore any call-saved registers. */
1167 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1168 if (REG_NEEDS_SAVE (regno, ifun))
1169 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1170
1171 /* Pop the stack for the stdarg save area. */
1172 if (layout.stdarg_save_size)
1173 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1174 GEN_INT (- layout.stdarg_save_size));
1175
1176 /* Return. */
1177 if (ifun)
1178 emit_jump_insn (gen_return_internal_interrupt ());
1179 else
1180 emit_jump_insn (gen_return_internal ());
1181 }
1182
1183 int
xstormy16_epilogue_uses(int regno)1184 xstormy16_epilogue_uses (int regno)
1185 {
1186 if (reload_completed && call_used_regs[regno])
1187 {
1188 const int ifun = xstormy16_interrupt_function_p ();
1189 return REG_NEEDS_SAVE (regno, ifun);
1190 }
1191 return 0;
1192 }
1193
/* Profiling is not implemented for this target; report that via a
   "sorry" diagnostic rather than emitting broken code.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1199
1200 /* Update CUM to advance past an argument in the argument list. The
1201 values MODE, TYPE and NAMED describe that argument. Once this is
1202 done, the variable CUM is suitable for analyzing the *following*
1203 argument with `TARGET_FUNCTION_ARG', etc.
1204
1205 This function need not do anything if the argument in question was
1206 passed on the stack. The compiler knows how to track the amount of
1207 stack space used for arguments without any special help. However,
1208 it makes life easier for xstormy16_build_va_list if it does update
1209 the word count. */
1210
1211 static void
xstormy16_function_arg_advance(cumulative_args_t cum_v,enum machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)1212 xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1213 const_tree type, bool named ATTRIBUTE_UNUSED)
1214 {
1215 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1216
1217 /* If an argument would otherwise be passed partially in registers,
1218 and partially on the stack, the whole of it is passed on the
1219 stack. */
1220 if (*cum < NUM_ARGUMENT_REGISTERS
1221 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1222 *cum = NUM_ARGUMENT_REGISTERS;
1223
1224 *cum += XSTORMY16_WORD_SIZE (type, mode);
1225 }
1226
1227 static rtx
xstormy16_function_arg(cumulative_args_t cum_v,enum machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)1228 xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1229 const_tree type, bool named ATTRIBUTE_UNUSED)
1230 {
1231 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1232
1233 if (mode == VOIDmode)
1234 return const0_rtx;
1235 if (targetm.calls.must_pass_in_stack (mode, type)
1236 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1237 return NULL_RTX;
1238 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1239 }
1240
1241 /* Build the va_list type.
1242
1243 For this chip, va_list is a record containing a counter and a pointer.
1244 The counter is of type 'int' and indicates how many bytes
1245 have been used to date. The pointer indicates the stack position
1246 for arguments that have not been passed in registers.
1247 To keep the layout nice, the pointer is first in the structure. */
1248
1249 static tree
xstormy16_build_builtin_va_list(void)1250 xstormy16_build_builtin_va_list (void)
1251 {
1252 tree f_1, f_2, record, type_decl;
1253
1254 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1255 type_decl = build_decl (BUILTINS_LOCATION,
1256 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1257
1258 f_1 = build_decl (BUILTINS_LOCATION,
1259 FIELD_DECL, get_identifier ("base"),
1260 ptr_type_node);
1261 f_2 = build_decl (BUILTINS_LOCATION,
1262 FIELD_DECL, get_identifier ("count"),
1263 unsigned_type_node);
1264
1265 DECL_FIELD_CONTEXT (f_1) = record;
1266 DECL_FIELD_CONTEXT (f_2) = record;
1267
1268 TYPE_STUB_DECL (record) = type_decl;
1269 TYPE_NAME (record) = type_decl;
1270 TYPE_FIELDS (record) = f_1;
1271 DECL_CHAIN (f_1) = f_2;
1272
1273 layout_type (record);
1274
1275 return record;
1276 }
1277
1278 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1279 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1280 variable to initialize. NEXTARG is the machine independent notion of the
1281 'next' argument after the variable arguments. */
1282
static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t,u;

  /* Interrupt handlers use a non-standard incoming frame, so varargs
     cannot work in them.  */
  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The two fields of the va_list record, in the order laid out by
     xstormy16_build_builtin_va_list: `base' then `count'.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = incoming-arguments pointer - INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes of arguments already consumed by the named
     parameters (crtl->args.info counts words).  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1314
1315 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1316 of type va_list as a tree, TYPE is the type passed to va_arg.
1317 Note: This algorithm is documented in stormy-abi. */
1318
static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* COMPONENT_REFs for the `base' and `count' fields of the va_list
     (see xstormy16_build_builtin_va_list).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Arguments occupy a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  /* If the argument may have been passed in registers, fetch it from
     the register save area:
	if (count_tmp + size > size_of_reg_args) goto lab_fromstack;
	addr = base + count_tmp;
	goto lab_gotaddr;
	lab_fromstack:  */
  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* if (count_tmp < size_of_reg_args) count_tmp = size_of_reg_args;  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: the chain of operations below computes
	addr = base + (size_of_reg_args + INCOMING_FRAME_SP_OFFSET
		       - count_tmp - size)
     via a negated difference (see the stormy-abi document for the
     stack argument layout this reflects).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Both paths join here: advance the counter past this argument.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1409
1410 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1411
static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  /* Walk through the trampoline one HImode word at a time; REG_ADDR
     holds the current store address and is bumped by 2 after each
     store.  */
  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: opcode 0x3130 | STATIC_CHAIN_REGNUM -- presumably a
     load-immediate into the static chain register whose operand is
     the following word; confirm against the ISA manual.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value itself.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: 0x0200 combined with the low byte of FNDECL's address.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining high bits of the function address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1443
1444 /* Worker function for TARGET_FUNCTION_VALUE. */
1445
1446 static rtx
xstormy16_function_value(const_tree valtype,const_tree func ATTRIBUTE_UNUSED,bool outgoing ATTRIBUTE_UNUSED)1447 xstormy16_function_value (const_tree valtype,
1448 const_tree func ATTRIBUTE_UNUSED,
1449 bool outgoing ATTRIBUTE_UNUSED)
1450 {
1451 enum machine_mode mode;
1452 mode = TYPE_MODE (valtype);
1453 PROMOTE_MODE (mode, 0, valtype);
1454 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1455 }
1456
1457 /* Worker function for TARGET_LIBCALL_VALUE. */
1458
static rtx
xstormy16_libcall_value (enum machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  /* Libcall results come back in the same register as ordinary
     function values, in the caller-requested mode (no promotion).  */
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1465
1466 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1467
static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  /* Only the single return-value register ever holds a function
     result on this target.  */
  return (regno == RETURN_VALUE_REGNUM);
}
1473
1474 /* A C compound statement that outputs the assembler code for a thunk function,
1475 used to implement C++ virtual function calls with multiple inheritance. The
1476 thunk acts as a wrapper around a virtual function, adjusting the implicit
1477 object parameter before handing control off to the real function.
1478
1479 First, emit code to add the integer DELTA to the location that contains the
1480 incoming first argument. Assume that this argument contains a pointer, and
1481 is the one used to pass the `this' pointer in C++. This is the incoming
1482 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1483 addition must preserve the values of all other incoming arguments.
1484
1485 After the addition, emit code to jump to FUNCTION, which is a
1486 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1487 the return address. Hence returning from FUNCTION will return to whoever
1488 called the current `thunk'.
1489
1490 The effect must be as if @var{function} had been called directly
1491 with the adjusted first argument. This macro is responsible for
1492 emitting all of the code for a thunk function;
1493 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1494 not invoked.
1495
1496 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1497 extracted from it.) It might possibly be useful on some targets, but
1498 probably not. */
1499
static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure,
     in which case `this' lives in the next argument register.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust the `this' pointer by DELTA (truncated to 16 bits), then
     tail-jump to the real function.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1518
1519 /* The purpose of this function is to override the default behavior of
1520 BSS objects. Normally, they go into .bss or .sbss via ".common"
1521 directives, but we need to override that and put them in
1522 .bss_below100. We can't just use a section override (like we do
1523 for .data_below100), because that makes them initialized rather
1524 than uninitialized. */
1525
1526 void
xstormy16_asm_output_aligned_common(FILE * stream,tree decl,const char * name,int size,int align,int global)1527 xstormy16_asm_output_aligned_common (FILE *stream,
1528 tree decl,
1529 const char *name,
1530 int size,
1531 int align,
1532 int global)
1533 {
1534 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1535 rtx symbol;
1536
1537 if (mem != NULL_RTX
1538 && MEM_P (mem)
1539 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1540 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1541 {
1542 const char *name2;
1543 int p2align = 0;
1544
1545 switch_to_section (bss100_section);
1546
1547 while (align > 8)
1548 {
1549 align /= 2;
1550 p2align ++;
1551 }
1552
1553 name2 = default_strip_name_encoding (name);
1554 if (global)
1555 fprintf (stream, "\t.globl\t%s\n", name2);
1556 if (p2align)
1557 fprintf (stream, "\t.p2align %d\n", p2align);
1558 fprintf (stream, "\t.type\t%s, @object\n", name2);
1559 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1560 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1561 return;
1562 }
1563
1564 if (!global)
1565 {
1566 fprintf (stream, "\t.local\t");
1567 assemble_name (stream, name);
1568 fprintf (stream, "\n");
1569 }
1570 fprintf (stream, "\t.comm\t");
1571 assemble_name (stream, name);
1572 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1573 }
1574
1575 /* Implement TARGET_ASM_INIT_SECTIONS. */
1576
static void
xstormy16_asm_init_sections (void)
{
  /* Create the .bss_below100 section for uninitialized "below100"
     objects; @nobits keeps it from occupying file space.  */
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1585
1586 /* Mark symbols with the "below100" attribute so that we can use the
1587 special addressing modes for them. */
1588
1589 static void
xstormy16_encode_section_info(tree decl,rtx r,int first)1590 xstormy16_encode_section_info (tree decl, rtx r, int first)
1591 {
1592 default_encode_section_info (decl, r, first);
1593
1594 if (TREE_CODE (decl) == VAR_DECL
1595 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1596 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1597 {
1598 rtx symbol = XEXP (r, 0);
1599
1600 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1601 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1602 }
1603 }
1604
1605 #undef TARGET_ASM_CONSTRUCTOR
1606 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1607 #undef TARGET_ASM_DESTRUCTOR
1608 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1609
1610 /* Output constructors and destructors. Just like
1611 default_named_section_asm_out_* but don't set the sections writable. */
1612
1613 static void
xstormy16_asm_out_destructor(rtx symbol,int priority)1614 xstormy16_asm_out_destructor (rtx symbol, int priority)
1615 {
1616 const char *section = ".dtors";
1617 char buf[16];
1618
1619 /* ??? This only works reliably with the GNU linker. */
1620 if (priority != DEFAULT_INIT_PRIORITY)
1621 {
1622 sprintf (buf, ".dtors.%.5u",
1623 /* Invert the numbering so the linker puts us in the proper
1624 order; constructors are run from right to left, and the
1625 linker sorts in increasing order. */
1626 MAX_INIT_PRIORITY - priority);
1627 section = buf;
1628 }
1629
1630 switch_to_section (get_section (section, 0, NULL));
1631 assemble_align (POINTER_SIZE);
1632 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1633 }
1634
1635 static void
xstormy16_asm_out_constructor(rtx symbol,int priority)1636 xstormy16_asm_out_constructor (rtx symbol, int priority)
1637 {
1638 const char *section = ".ctors";
1639 char buf[16];
1640
1641 /* ??? This only works reliably with the GNU linker. */
1642 if (priority != DEFAULT_INIT_PRIORITY)
1643 {
1644 sprintf (buf, ".ctors.%.5u",
1645 /* Invert the numbering so the linker puts us in the proper
1646 order; constructors are run from right to left, and the
1647 linker sorts in increasing order. */
1648 MAX_INIT_PRIORITY - priority);
1649 section = buf;
1650 }
1651
1652 switch_to_section (get_section (section, 0, NULL));
1653 assemble_align (POINTER_SIZE);
1654 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1655 }
1656
1657 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1658
1659 Print a memory address as an operand to reference that memory location. */
1660
1661 static void
xstormy16_print_operand_address(FILE * file,rtx address)1662 xstormy16_print_operand_address (FILE *file, rtx address)
1663 {
1664 HOST_WIDE_INT offset;
1665 int pre_dec, post_inc;
1666
1667 /* There are a few easy cases. */
1668 if (CONST_INT_P (address))
1669 {
1670 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1671 return;
1672 }
1673
1674 if (CONSTANT_P (address) || LABEL_P (address))
1675 {
1676 output_addr_const (file, address);
1677 return;
1678 }
1679
1680 /* Otherwise, it's hopefully something of the form
1681 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1682 if (GET_CODE (address) == PLUS)
1683 {
1684 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1685 offset = INTVAL (XEXP (address, 1));
1686 address = XEXP (address, 0);
1687 }
1688 else
1689 offset = 0;
1690
1691 pre_dec = (GET_CODE (address) == PRE_DEC);
1692 post_inc = (GET_CODE (address) == POST_INC);
1693 if (pre_dec || post_inc)
1694 address = XEXP (address, 0);
1695
1696 gcc_assert (REG_P (address));
1697
1698 fputc ('(', file);
1699 if (pre_dec)
1700 fputs ("--", file);
1701 fputs (reg_names [REGNO (address)], file);
1702 if (post_inc)
1703 fputs ("++", file);
1704 if (offset != 0)
1705 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1706 fputc (')', file);
1707 }
1708
1709 /* Worker function for TARGET_PRINT_OPERAND.
1710
1711 Print an operand to an assembler instruction. */
1712
1713 static void
xstormy16_print_operand(FILE * file,rtx x,int code)1714 xstormy16_print_operand (FILE *file, rtx x, int code)
1715 {
1716 switch (code)
1717 {
1718 case 'B':
1719 /* There is either one bit set, or one bit clear, in X.
1720 Print it preceded by '#'. */
1721 {
1722 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1723 HOST_WIDE_INT xx = 1;
1724 HOST_WIDE_INT l;
1725
1726 if (CONST_INT_P (x))
1727 xx = INTVAL (x);
1728 else
1729 output_operand_lossage ("'B' operand is not constant");
1730
1731 /* GCC sign-extends masks with the MSB set, so we have to
1732 detect all the cases that differ only in sign extension
1733 beyond the bits we care about. Normally, the predicates
1734 and constraints ensure that we have the right values. This
1735 works correctly for valid masks. */
1736 if (bits_set[xx & 7] <= 1)
1737 {
1738 /* Remove sign extension bits. */
1739 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1740 xx &= 0xff;
1741 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1742 xx &= 0xffff;
1743 l = exact_log2 (xx);
1744 }
1745 else
1746 {
1747 /* Add sign extension bits. */
1748 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1749 xx |= ~(HOST_WIDE_INT)0xff;
1750 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1751 xx |= ~(HOST_WIDE_INT)0xffff;
1752 l = exact_log2 (~xx);
1753 }
1754
1755 if (l == -1)
1756 output_operand_lossage ("'B' operand has multiple bits set");
1757
1758 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1759 return;
1760 }
1761
1762 case 'C':
1763 /* Print the symbol without a surrounding @fptr(). */
1764 if (GET_CODE (x) == SYMBOL_REF)
1765 assemble_name (file, XSTR (x, 0));
1766 else if (LABEL_P (x))
1767 output_asm_label (x);
1768 else
1769 xstormy16_print_operand_address (file, x);
1770 return;
1771
1772 case 'o':
1773 case 'O':
1774 /* Print the immediate operand less one, preceded by '#'.
1775 For 'O', negate it first. */
1776 {
1777 HOST_WIDE_INT xx = 0;
1778
1779 if (CONST_INT_P (x))
1780 xx = INTVAL (x);
1781 else
1782 output_operand_lossage ("'o' operand is not constant");
1783
1784 if (code == 'O')
1785 xx = -xx;
1786
1787 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1788 return;
1789 }
1790
1791 case 'b':
1792 /* Print the shift mask for bp/bn. */
1793 {
1794 HOST_WIDE_INT xx = 1;
1795 HOST_WIDE_INT l;
1796
1797 if (CONST_INT_P (x))
1798 xx = INTVAL (x);
1799 else
1800 output_operand_lossage ("'B' operand is not constant");
1801
1802 l = 7 - xx;
1803
1804 fputs (IMMEDIATE_PREFIX, file);
1805 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1806 return;
1807 }
1808
1809 case 0:
1810 /* Handled below. */
1811 break;
1812
1813 default:
1814 output_operand_lossage ("xstormy16_print_operand: unknown code");
1815 return;
1816 }
1817
1818 switch (GET_CODE (x))
1819 {
1820 case REG:
1821 fputs (reg_names [REGNO (x)], file);
1822 break;
1823
1824 case MEM:
1825 xstormy16_print_operand_address (file, XEXP (x, 0));
1826 break;
1827
1828 default:
1829 /* Some kind of constant or label; an immediate operand,
1830 so prefix it with '#' for the assembler. */
1831 fputs (IMMEDIATE_PREFIX, file);
1832 output_addr_const (file, x);
1833 break;
1834 }
1835
1836 return;
1837 }
1838
1839 /* Expander for the `casesi' pattern.
1840 INDEX is the index of the switch statement.
1841 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1842 to the first table entry.
1843 RANGE is the number of table entries.
1844 TABLE is an ADDR_VEC that is the jump table.
1845 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1846 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1847
void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Rebase INDEX to zero; the unsigned GTU comparison then sends both
     out-of-range and below-LOWER_BOUND values to DEFAULT_LABEL.  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  /* Scale the index by 4 (shift left by 2) to address the 4-byte
     entries emitted by xstormy16_output_addr_vec.  */
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1869
1870 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1871 instructions, without label or alignment or any other special
1872 constructs. We know that the previous instruction will be the
1873 `tablejump_pcrel' output above.
1874
1875 TODO: it might be nice to output 'br' instructions if they could
1876 all reach. */
1877
1878 void
xstormy16_output_addr_vec(FILE * file,rtx label ATTRIBUTE_UNUSED,rtx table)1879 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1880 {
1881 int vlen, idx;
1882
1883 switch_to_section (current_function_section ());
1884
1885 vlen = XVECLEN (table, 0);
1886 for (idx = 0; idx < vlen; idx++)
1887 {
1888 fputs ("\tjmpf ", file);
1889 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1890 fputc ('\n', file);
1891 }
1892 }
1893
1894 /* Expander for the `call' patterns.
1895 RETVAL is the RTL for the return register or NULL for void functions.
1896 DEST is the function to call, expressed as a MEM.
1897 COUNTER is ignored. */
1898
void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  enum machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  /* Indirect calls need the target address in a register.  */
  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* The call pattern carries a second (USE ...) operand: a fresh
     pseudo set to zero for indirect calls, const0_rtx for direct
     ones.  NOTE(review): presumably the md call patterns depend on
     this distinction -- confirm against stormy16.md.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
1933
1934 /* Expanders for multiword computational operations. */
1935
1936 /* Expander for arithmetic operations; emit insns to compute
1937
1938 (set DEST (CODE:MODE SRC0 SRC1))
1939
1940 When CODE is COMPARE, a branch template is generated
1941 (this saves duplicating code in xstormy16_split_cbranch). */
1942
void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  /* Number of word-sized (HImode) pieces that make up MODE.  */
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is implemented as 0 - SRC1: zero SRC0 and fall through
     to the MINUS handling below.  NOTE(review): this overwrites the
     rtx passed as SRC0 -- callers are expected to pass a scratch for
     NEG; confirm against the md expanders.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Operate word by word, least significant word first, chaining the
     carry between iterations.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Word-sized views of the operands at this word offset.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding zero in the first word is a no-op and need not
	     start the carry chain, so the insn can be skipped.  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* addchi4 begins the chain; addchi5 presumably also consumes
	     the incoming carry -- TODO confirm against stormy16.md.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE, the most significant word is emitted as a
	     hand-built PARALLEL combining the subtract with a branch
	     skeleton; xstormy16_split_cbranch relies on this shape.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      /* w_src0 minus the zero-extended carry flag.  */
	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      /* Both branch arms are pc_rtx placeholders; presumably the
		 real label/condition are filled in when the cbranch is
		 split -- confirm in xstormy16_split_cbranch.  */
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: OR/XOR with 0, AND with -1.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2039
2040 /* The shift operations are split at output time for constant values;
2041 variable-width shifts get handed off to a library routine.
2042
2043 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2044 SIZE_R will be a CONST_INT, X will be a hard register. */
2045
2046 const char *
xstormy16_output_shift(enum machine_mode mode,enum rtx_code code,rtx x,rtx size_r,rtx temp)2047 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2048 rtx x, rtx size_r, rtx temp)
2049 {
2050 HOST_WIDE_INT size;
2051 const char *r0, *r1, *rt;
2052 static char r[64];
2053
2054 gcc_assert (CONST_INT_P (size_r)
2055 && REG_P (x)
2056 && mode == SImode);
2057
2058 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2059
2060 if (size == 0)
2061 return "";
2062
2063 r0 = reg_names [REGNO (x)];
2064 r1 = reg_names [REGNO (x) + 1];
2065
2066 /* For shifts of size 1, we can use the rotate instructions. */
2067 if (size == 1)
2068 {
2069 switch (code)
2070 {
2071 case ASHIFT:
2072 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2073 break;
2074 case ASHIFTRT:
2075 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2076 break;
2077 case LSHIFTRT:
2078 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2079 break;
2080 default:
2081 gcc_unreachable ();
2082 }
2083 return r;
2084 }
2085
2086 /* For large shifts, there are easy special cases. */
2087 if (size == 16)
2088 {
2089 switch (code)
2090 {
2091 case ASHIFT:
2092 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2093 break;
2094 case ASHIFTRT:
2095 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2096 break;
2097 case LSHIFTRT:
2098 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2099 break;
2100 default:
2101 gcc_unreachable ();
2102 }
2103 return r;
2104 }
2105 if (size > 16)
2106 {
2107 switch (code)
2108 {
2109 case ASHIFT:
2110 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2111 r1, r0, r0, r1, (int) size - 16);
2112 break;
2113 case ASHIFTRT:
2114 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2115 r0, r1, r1, r0, (int) size - 16);
2116 break;
2117 case LSHIFTRT:
2118 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2119 r0, r1, r1, r0, (int) size - 16);
2120 break;
2121 default:
2122 gcc_unreachable ();
2123 }
2124 return r;
2125 }
2126
2127 /* For the rest, we have to do more work. In particular, we
2128 need a temporary. */
2129 rt = reg_names [REGNO (temp)];
2130 switch (code)
2131 {
2132 case ASHIFT:
2133 sprintf (r,
2134 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2135 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2136 r1, rt);
2137 break;
2138 case ASHIFTRT:
2139 sprintf (r,
2140 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2141 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2142 r0, rt);
2143 break;
2144 case LSHIFTRT:
2145 sprintf (r,
2146 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2147 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2148 r0, rt);
2149 break;
2150 default:
2151 gcc_unreachable ();
2152 }
2153 return r;
2154 }
2155
2156 /* Attribute handling. */
2157
2158 /* Return nonzero if the function is an interrupt function. */
2159
2160 int
xstormy16_interrupt_function_p(void)2161 xstormy16_interrupt_function_p (void)
2162 {
2163 tree attributes;
2164
2165 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2166 any functions are declared, which is demonstrably wrong, but
2167 it is worked around here. FIXME. */
2168 if (!cfun)
2169 return 0;
2170
2171 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2172 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2173 }
2174
2175 #undef TARGET_ATTRIBUTE_TABLE
2176 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2177
2178 static tree xstormy16_handle_interrupt_attribute
2179 (tree *, tree, tree, int, bool *);
2180 static tree xstormy16_handle_below100_attribute
2181 (tree *, tree, tree, int, bool *);
2182
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity }.  */
  /* "interrupt" attaches to function types only (type_req and
     fn_type_req are set); the handler warns otherwise.  */
  { "interrupt", 0, 0, false, true, true,
    xstormy16_handle_interrupt_attribute , false },
  /* Both spellings of the below-100 attribute share one handler.  */
  { "BELOW100", 0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100", 0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  /* Sentinel entry terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL, false }
};
2195
2196 /* Handle an "interrupt" attribute;
2197 arguments as in struct attribute_spec.handler. */
2198
2199 static tree
xstormy16_handle_interrupt_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2200 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2201 tree args ATTRIBUTE_UNUSED,
2202 int flags ATTRIBUTE_UNUSED,
2203 bool *no_add_attrs)
2204 {
2205 if (TREE_CODE (*node) != FUNCTION_TYPE)
2206 {
2207 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2208 name);
2209 *no_add_attrs = true;
2210 }
2211
2212 return NULL_TREE;
2213 }
2214
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */
2217
2218 static tree
xstormy16_handle_below100_attribute(tree * node,tree name ATTRIBUTE_UNUSED,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2219 xstormy16_handle_below100_attribute (tree *node,
2220 tree name ATTRIBUTE_UNUSED,
2221 tree args ATTRIBUTE_UNUSED,
2222 int flags ATTRIBUTE_UNUSED,
2223 bool *no_add_attrs)
2224 {
2225 if (TREE_CODE (*node) != VAR_DECL
2226 && TREE_CODE (*node) != POINTER_TYPE
2227 && TREE_CODE (*node) != TYPE_DECL)
2228 {
2229 warning (OPT_Wattributes,
2230 "%<__BELOW100__%> attribute only applies to variables");
2231 *no_add_attrs = true;
2232 }
2233 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2234 {
2235 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2236 {
2237 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2238 "with auto storage class");
2239 *no_add_attrs = true;
2240 }
2241 }
2242
2243 return NULL_TREE;
2244 }
2245
2246 #undef TARGET_INIT_BUILTINS
2247 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2248 #undef TARGET_EXPAND_BUILTIN
2249 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2250
/* Description table for the MD builtins.  md_code names the insn to
   generate; arg_ops maps each insn operand, in order, to a call
   argument ('0'..'9'), a scratch register ('t') or the value returned
   to the caller ('r'); arg_types lists the return type followed by
   the argument types.  */
static struct
{
  const char * name;
  int md_code;
  const char * arg_ops; /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
s16builtins[] =
{
  /* Each div/mod pair shares one divide insn; swapping 'r' and 't'
     selects which insn output is handed back -- presumably operand 0
     is the quotient and operand 1 the remainder (confirm against the
     sdivlh/udivlh patterns in stormy16.md).  */
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};
2266
2267 static void
xstormy16_init_builtins(void)2268 xstormy16_init_builtins (void)
2269 {
2270 tree args[2], ret_type, arg = NULL_TREE, ftype;
2271 int i, a, n_args;
2272
2273 ret_type = void_type_node;
2274
2275 for (i = 0; s16builtins[i].name; i++)
2276 {
2277 n_args = strlen (s16builtins[i].arg_types) - 1;
2278
2279 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2280
2281 for (a = n_args - 1; a >= 0; a--)
2282 args[a] = NULL_TREE;
2283
2284 for (a = n_args; a >= 0; a--)
2285 {
2286 switch (s16builtins[i].arg_types[a])
2287 {
2288 case 's': arg = short_integer_type_node; break;
2289 case 'S': arg = short_unsigned_type_node; break;
2290 case 'l': arg = long_integer_type_node; break;
2291 case 'L': arg = long_unsigned_type_node; break;
2292 default: gcc_unreachable ();
2293 }
2294 if (a == 0)
2295 ret_type = arg;
2296 else
2297 args[a-1] = arg;
2298 }
2299 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2300 add_builtin_function (s16builtins[i].name, ftype,
2301 i, BUILT_IN_MD, NULL, NULL_TREE);
2302 }
2303 }
2304
/* Expand a call to one of the builtins registered by
   xstormy16_init_builtins.  The builtin's function code indexes
   s16builtins[]; that entry's arg_ops string maps each insn operand to
   a call argument ('0'..'9'), a fresh scratch register ('t'), or the
   value returned to the caller ('r').  */
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl); /* index into s16builtins[] */
  code = s16builtins[i].md_code;

  /* Expand up to ten actual arguments into rtx form.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the operand vector for the insn, one operand per arg_ops
     character.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      enum machine_mode omode;

      copyto[o] = 0;

      omode = (enum machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand fails the insn's predicate, route it through a
	 fresh register.  For output operands ('=' or '+' constraint)
	 remember the real destination in COPYTO so the result can be
	 copied back after the insn; for inputs just copy the value in.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* NOTE(review): op[] slots past the pattern's operand count are
     passed uninitialized; presumably the generator function ignores
     the extra arguments -- confirm against GEN_FCN's (rtx, ...)
     signature.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy redirected outputs back to their real destinations, keeping
     RETVAL pointing at the caller-visible copy.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
2371
2372 /* Look for combinations of insns that can be converted to BN or BP
2373 opcodes. This is, unfortunately, too complex to do with MD
2374 patterns. */
2375
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and_insn, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  /* Only branches recognized as cbranchhi / cbranchhi_neg are
     candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* EQ/NE against zero: expect the tested bit to have been
	 isolated by a preceding AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit; expect a preceding
	 sign extension.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be a register against const0_rtx, and the
     register must die in this insn -- otherwise its value is still
     needed and the combination would be unsafe.  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  /* Give up if the register is touched by anything else, or if
	     we hit something that is not a plain insn or note.  */
	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (GET_CODE (and_insn) != NOTE
	      && GET_CODE (and_insn) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (GET_CODE (and_insn) != NOTE
	      && GET_CODE (and_insn) != INSN)
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
	     (clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }

  if (and_insn == NULL_RTX)
    return;

  /* Walk further back to find the load that put the tested value in
     REG; it must be a below-100 memory access of one of the three
     recognized shapes.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  /* Work out which single bit is being tested.  */
  if (need_extend)
    {
      /* Sign-bit test: the mask is the top bit of the loaded width.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND's constant must have exactly one bit set.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Account for the bits discarded by a preceding right shift.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access to the single QImode byte that actually
     holds the tested bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (Pmode, addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test memory directly, then delete
     the now-redundant feeder insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2594
2595 static void
xstormy16_reorg(void)2596 xstormy16_reorg (void)
2597 {
2598 rtx insn;
2599
2600 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2601 {
2602 if (! JUMP_P (insn))
2603 continue;
2604 combine_bnp (insn);
2605 }
2606 }
2607
2608 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2609
2610 static bool
xstormy16_return_in_memory(const_tree type,const_tree fntype ATTRIBUTE_UNUSED)2611 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2612 {
2613 const HOST_WIDE_INT size = int_size_in_bytes (type);
2614 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2615 }
2616
2617 #undef TARGET_ASM_ALIGNED_HI_OP
2618 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2619 #undef TARGET_ASM_ALIGNED_SI_OP
2620 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2621 #undef TARGET_ENCODE_SECTION_INFO
2622 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2623
2624 /* Select_section doesn't handle .bss_below100. */
2625 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2626 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2627
2628 #undef TARGET_ASM_OUTPUT_MI_THUNK
2629 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2630 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2631 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2632
2633 #undef TARGET_PRINT_OPERAND
2634 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2635 #undef TARGET_PRINT_OPERAND_ADDRESS
2636 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2637
2638 #undef TARGET_MEMORY_MOVE_COST
2639 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2640 #undef TARGET_RTX_COSTS
2641 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2642 #undef TARGET_ADDRESS_COST
2643 #define TARGET_ADDRESS_COST xstormy16_address_cost
2644
2645 #undef TARGET_BUILD_BUILTIN_VA_LIST
2646 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2647 #undef TARGET_EXPAND_BUILTIN_VA_START
2648 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2649 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2650 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2651
2652 #undef TARGET_PROMOTE_FUNCTION_MODE
2653 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2654 #undef TARGET_PROMOTE_PROTOTYPES
2655 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2656
2657 #undef TARGET_FUNCTION_ARG
2658 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2659 #undef TARGET_FUNCTION_ARG_ADVANCE
2660 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2661
2662 #undef TARGET_RETURN_IN_MEMORY
2663 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2664 #undef TARGET_FUNCTION_VALUE
2665 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2666 #undef TARGET_LIBCALL_VALUE
2667 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2668 #undef TARGET_FUNCTION_VALUE_REGNO_P
2669 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2670
2671 #undef TARGET_MACHINE_DEPENDENT_REORG
2672 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2673
2674 #undef TARGET_PREFERRED_RELOAD_CLASS
2675 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2676 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2677 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2678
2679 #undef TARGET_LEGITIMATE_ADDRESS_P
2680 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2681 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2682 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2683
2684 #undef TARGET_CAN_ELIMINATE
2685 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2686
2687 #undef TARGET_TRAMPOLINE_INIT
2688 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2689
/* The global target hook vector for this backend, assembled from the
   TARGET_* overrides above plus the defaults supplied by target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;
2691
2692 #include "gt-stormy16.h"
2693