/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2020 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* To Do:

 * Re-enable memory-to-memory copies and fix up reload.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "toplev.h"
#include "langhooks.h"
#include "opts.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;

static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}

static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}

/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}

static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S   (1 << 0)
#define CC_FLAG_Z   (1 << 1)
#define CC_FLAG_O   (1 << 2)
#define CC_FLAG_C   (1 << 3)
#define CC_FLAG_FP  (1 << 4)	/* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);

/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area,
			   but it has not been placed there yet.  */
};

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}
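
/* Illustrative note, inferred from the test above: an already-encoded
   PID reference has the shape (plus (reg <pid base>) (const (unspec ...))),
   which is what the PLUS check matches.  A PID_UNENCODED reference is
   still a bare SYMBOL_REF/LABEL_REF (or a read-only decl) that
   rx_legitimize_address below will presumably wrap via gen_pid_addr.  */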

static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}

/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
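
/* Worked examples, derived from the checks above: for SImode,
   (reg r1) is accepted as register indirect;
   (plus (reg r1) (const_int 8)) is accepted because 8 is non-negative,
   a multiple of 4 and no more than 65535 * 4;
   (plus (reg r1) (reg r2)) is rejected for SImode but accepted for
   QImode; and (plus (reg r1) (mult (reg r2) (const_int 4))) is
   accepted for SImode because the scale factor matches the mode
   size.  */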

/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve indexed register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      return RX_REG_P (SUBREG_REG (mem));

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	return IN_RANGE (INTVAL (index), 0,
			 (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
	 Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}

/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
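
/* For reference, the assembler forms produced by the cases above:
   a plain register address prints as "[r1]", pre-decrement as
   "[-r1]", post-increment as "[r1+]", a constant displacement as
   "4[r1]", and an indexed address as "[r2,r1]" (for a scaled index
   the scale factor is implicit in the instruction's size suffix and
   is not printed; see the MULT case in rx_print_operand).  */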

static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}
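
/* For example, a value of 10 prints as "10", while 1000 prints as
   "0x3e8" in GAS syntax or "03e8H" in AS100 syntax; values below 64
   are kept in decimal, presumably because small constants read more
   naturally that way in assembly listings.  */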

static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}


/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

   %A  Print an operand without a leading # character.
   %B  Print an integer comparison name.
   %C  Print a control register name.
   %F  Print a condition code flag name.
   %G  Register used for small-data-area addressing
   %H  Print high part of a DImode register, integer or address.
   %L  Print low part of a DImode register, integer or address.
   %N  Print the negation of the immediate value.
   %P  Register used for PID addressing
   %Q  If the operand is a MEM, then correctly generate
       register indirect or register relative addressing.
   %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d"
		   " - using %<psw%>", (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s",
		   reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s",
		   reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", (long) offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scale factor as it is
	       based on the mode of the MEM, not the mode of the MULT
	       (which will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through.  */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()),
				XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}

/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case E_QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case E_HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case E_DFmode:
    case E_DImode:
    case E_SFmode:
    case E_SImode:
      extension = ".L";
      break;
    case E_VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
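
/* Illustrative templates produced by the code above: an SImode move
   yields "mov.L\t%1, %0", a QImode load from memory yields
   "mov.B\t%1, %0", and a small-data load becomes
   "mov.L\t%%gp(%A1)[%G1], %0" where %G1 is the small-data base
   register.  DImode/DFmode moves are split into two mov.L
   instructions, ordered so that a register-to-register move never
   clobbers the source half that is still needed.  */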

/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
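
/* ALIGNMENT must be a power of two for the mask trick above to work.
   For example, rx_round_up (5, 4) == 8 and rx_round_up (8, 4) == 8.  */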

/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding function
   argument ARG or NULL_RTX if the parameter should be passed on the
   stack.  CUM describes the previous parameters to the function.  */

static rtx
rx_function_arg (cumulative_args_t cum, const function_arg_info &arg)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  size = arg.promoted_size_in_bytes ();
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!arg.named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((arg.type == NULL || AGGREGATE_TYPE_P (arg.type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (arg.mode, next_reg);
}
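
/* Worked example, derived from the code above: with UNITS_PER_WORD == 4,
   the first four words of named arguments go in registers 1-4 (next_reg
   counts from 1, i.e. r1 on RX).  For int f (int a, long long b, int c),
   "a" lands in r1, "b" occupies r2/r3 and "c" goes in r4; a fifth
   argument word would be passed on the stack.  */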

static void
rx_function_arg_advance (cumulative_args_t cum,
			 const function_arg_info &arg)
{
  *get_cumulative_args (cum) += rx_function_arg_size (arg.mode, arg.type);
}

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}

/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode))
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}

/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}

static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}
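
/* For instance, an 8-byte or 16-byte struct is returned in registers,
   while a 6-byte struct (not a multiple of 4) or a 20-byte struct
   (larger than 16 bytes) is returned in memory, with its address in
   the struct-value register set up by rx_struct_value_rtx below.  */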

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}

static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}

/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}

/* Returns true if the provided function has the "fast_interrupt" attribute.  */

bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}

/* Returns true if the provided function has the "interrupt" attribute.  */

bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}

/* Returns true if the provided function has the "naked" attribute.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}

static bool use_fixed_regs = false;

static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val]
	= call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val]
	= call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}

/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}

/* Typical stack layout should look like this after the function's prologue:

                             |    |
                               --                       ^
                             |    | \                   |
                             |    |   arguments saved   | Increasing
                             |    |   on the stack      |  addresses
     PARENT   arg pointer -> |    | /
   -------------------------- ---- -------------------
     CHILD                   |ret |   return address
                               --
                             |    | \
                             |    |   call saved
                             |    |   registers
                             |    | /
                               --
                             |    | \
                             |    |   local
                             |    |   variables
         frame pointer ->    |    | /
                               --
                             |    | \
                             |    |   outgoing          | Decreasing
                             |    |   arguments         |  addresses
    current stack pointer -> |    | /                   |
   -------------------------- ---- ------------------   V
                             |    |                */

static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
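
/* This is the classic SWAR population count: the first step turns each
   2-bit field into a count of its set bits (the subtraction form of
   "(x & m1) + ((x >> 1) & m1)"), the next steps widen those to 4-bit
   and 8-bit counts, and the final shifts accumulate the byte counts.
   The result fits in 6 bits because the maximum count is 32.  For
   example, bit_count (0xf0) == 4.  */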

#if defined(TARGET_SAVE_ACC_REGISTER)
#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))
#else
#define MUST_SAVE_ACC_REGISTER 0
#endif

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_or_fixed_reg_p (reg)
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_or_fixed_reg_p (reg)
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the
     stack frame using multiple PUSH instructions instead of a single
     PUSHM instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}

/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
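
/* For example, pushing r6..r10 (operands[0] == 20, so last_reg == 4,
   and the first SET in the PARALLEL stores r10) emits "pushm r6-r10".
   The PARALLEL built by gen_rx_store_vector lists the registers from
   the highest number downwards, which is why the start of the range is
   computed by subtracting last_reg.  */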

/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
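
/* Illustration: gen_rx_store_vector (6, 8) builds

     (parallel [(set sp (minus sp 12))
		(set (mem (minus sp 4))  (reg r8))
		(set (mem (minus sp 8))  (reg r7))
		(set (mem (minus sp 12)) (reg r6))])

   i.e. one stack-pointer adjustment followed by the stores, with the
   highest numbered register at the highest address.  */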

/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}

/* Create CFI notes for register pops.  */

static void
add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
{
  rtx t = plus_constant (Pmode, stack_pointer_rtx,
			 (high - low + 1) * UNITS_PER_WORD);
  t = gen_rtx_SET (stack_pointer_rtx, t);
  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
  RTX_FRAME_RELATED_P (insn) = 1;
  for (unsigned int i = low; i <= high; i++)
    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
}


static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
		   (1 << (rx_max_constant_size * 8)));
}
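
/* Note that the bounds follow directly from the shift expressions
   above and are slightly loose: with rx_max_constant_size == 1, for
   instance, the test accepts values in [-256, 256] rather than the
   strict signed byte range [-128, 127].  */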

/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val),
						   UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}

static void
push_regs (unsigned int high, unsigned int low)
{
  rtx insn;

  if (low == high)
    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
  else
    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
						* UNITS_PER_WORD),
				       gen_rx_store_vector (low, high)));
  mark_frame_related (insn);
}

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    low = high = reg;

	    /* Look for a span of registers.
	       Note - we do not have to worry about -Os and whether
	       it is better to use a single, longer PUSHM as
	       rx_get_stack_layout has already done that for us.  */
	    while (reg-- > 0)
	      if ((mask & (1 << reg)) == 0)
		break;
	      else
		--low;

	    push_regs (high, low);
	    if (reg == (unsigned) -1)
	      break;
	  }
    }
  else if (low)
    push_regs (high, low);

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed...  */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed...  */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      false /* False because the epilogue will use the FP not the SP.  */);
    }
}

static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself.  */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument.  */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }
}
1906
1907 static void
rx_output_function_prologue(FILE * file)1908 rx_output_function_prologue (FILE * file)
1909 {
1910 add_vector_labels (file, "interrupt");
1911 add_vector_labels (file, "vector");
1912
1913 if (is_fast_interrupt_func (NULL_TREE))
1914 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1915
1916 if (is_interrupt_func (NULL_TREE))
1917 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1918
1919 if (is_naked_func (NULL_TREE))
1920 asm_fprintf (file, "\t; Note: Naked Function\n");
1921
1922 if (cfun->static_chain_decl != NULL)
1923 asm_fprintf (file, "\t; Note: Nested function declared "
1924 "inside another function.\n");
1925
1926 if (crtl->calls_eh_return)
1927 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1928 }
1929
1930 /* Generate a POPM or RTSD instruction that matches the given operands. */
1931
1932 void
rx_emit_stack_popm(rtx * operands,bool is_popm)1933 rx_emit_stack_popm (rtx * operands, bool is_popm)
1934 {
1935 HOST_WIDE_INT stack_adjust;
1936 HOST_WIDE_INT last_reg;
1937 rtx first_push;
1938
1939 gcc_assert (CONST_INT_P (operands[0]));
1940 stack_adjust = INTVAL (operands[0]);
1941
1942 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1943 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1944
1945 first_push = XVECEXP (operands[1], 0, 1);
1946 gcc_assert (SET_P (first_push));
1947 first_push = SET_DEST (first_push);
1948 gcc_assert (REG_P (first_push));
1949
1950 if (is_popm)
1951 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1952 reg_names [REGNO (first_push)],
1953 reg_names [REGNO (first_push) + last_reg]);
1954 else
1955 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1956 (int) stack_adjust,
1957 reg_names [REGNO (first_push)],
1958 reg_names [REGNO (first_push) + last_reg]);
1959 }
1960
1961 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1962
1963 static rtx
gen_rx_rtsd_vector(unsigned int adjust,unsigned int low,unsigned int high)1964 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1965 {
1966 unsigned int i;
1967 unsigned int bias = 3;
1968 unsigned int count = (high - low) + bias;
1969 rtx vector;
1970
1971 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1972
1973 XVECEXP (vector, 0, 0) =
1974 gen_rtx_SET (stack_pointer_rtx,
1975 plus_constant (Pmode, stack_pointer_rtx, adjust));
1976
1977 for (i = 0; i < count - 2; i++)
1978 XVECEXP (vector, 0, i + 1) =
1979 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1980 gen_rtx_MEM (SImode,
1981 i == 0 ? stack_pointer_rtx
1982 : plus_constant (Pmode, stack_pointer_rtx,
1983 i * UNITS_PER_WORD)));
1984
1985 XVECEXP (vector, 0, count - 1) = ret_rtx;
1986
1987 return vector;
1988 }
1989
1990 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1991
1992 static rtx
gen_rx_popm_vector(unsigned int low,unsigned int high)1993 gen_rx_popm_vector (unsigned int low, unsigned int high)
1994 {
1995 unsigned int i;
1996 unsigned int count = (high - low) + 2;
1997 rtx vector;
1998
1999 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2000
2001 XVECEXP (vector, 0, 0) =
2002 gen_rtx_SET (stack_pointer_rtx,
2003 plus_constant (Pmode, stack_pointer_rtx,
2004 (count - 1) * UNITS_PER_WORD));
2005
2006 for (i = 0; i < count - 1; i++)
2007 XVECEXP (vector, 0, i + 1) =
2008 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
2009 gen_rtx_MEM (SImode,
2010 i == 0 ? stack_pointer_rtx
2011 : plus_constant (Pmode, stack_pointer_rtx,
2012 i * UNITS_PER_WORD)));
2013
2014 return vector;
2015 }
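
/* As a sketch (register numbers illustrative), gen_rx_popm_vector (6, 9)
   builds a PARALLEL of this shape:

     (parallel [(set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 16)))
                (set (reg:SI 6) (mem:SI (reg:SI sp)))
                (set (reg:SI 7) (mem:SI (plus:SI (reg:SI sp) (const_int 4))))
                (set (reg:SI 8) (mem:SI (plus:SI (reg:SI sp) (const_int 8))))
                (set (reg:SI 9) (mem:SI (plus:SI (reg:SI sp) (const_int 12))))])

   which is the form the rx_load_multiple_vector predicate expects. */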
2016
2017 /* Returns true if a simple return insn can be used. */
2018
2019 bool
2020 rx_can_use_simple_return (void)
2021 {
2022 unsigned int low;
2023 unsigned int high;
2024 unsigned int frame_size;
2025 unsigned int stack_size;
2026 unsigned int register_mask;
2027
2028 if (is_naked_func (NULL_TREE)
2029 || is_fast_interrupt_func (NULL_TREE)
2030 || is_interrupt_func (NULL_TREE))
2031 return false;
2032
2033 rx_get_stack_layout (& low, & high, & register_mask,
2034 & frame_size, & stack_size);
2035
2036 return (register_mask == 0
2037 && (frame_size + stack_size) == 0
2038 && low == 0);
2039 }
2040
2041 static void
2042 pop_regs (unsigned int high, unsigned int low)
2043 {
2044 rtx_insn *insn;
2045 if (high == low)
2046 insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2047 else
2048 insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
2049 * UNITS_PER_WORD),
2050 gen_rx_popm_vector (low, high)));
2051 add_pop_cfi_notes (insn, high, low);
2052 }
2053
2054 void
2055 rx_expand_epilogue (bool is_sibcall)
2056 {
2057 unsigned int low;
2058 unsigned int high;
2059 unsigned int frame_size;
2060 unsigned int stack_size;
2061 unsigned int register_mask;
2062 unsigned int regs_size;
2063 unsigned int reg;
2064 unsigned HOST_WIDE_INT total_size;
2065
2066 /* FIXME: We do not support indirect sibcalls at the moment because we
2067 cannot guarantee that the register holding the function address is a
2068 call-used register. If it is a call-saved register then the stack
2069 pop instructions generated in the epilogue will corrupt the address
2070 before it is used.
2071
2072 Creating a new call-used-only register class works but then the
2073 reload pass gets stuck because it cannot always find a call-used
2074 register for spilling sibcalls.
2075
2076 The other possible solution is for this pass to scan forward for the
2077 sibcall instruction (if it has been generated) and work out if it
2078 is an indirect sibcall using a call-saved register. If it is then
2079 the address can be copied into a call-used register in this epilogue
2080 code and the sibcall instruction modified to use that register. */
2081
2082 if (is_naked_func (NULL_TREE))
2083 {
2084 gcc_assert (! is_sibcall);
2085
2086 /* Naked functions use their own, programmer provided epilogues.
2087 But, in order to keep gcc happy we have to generate some kind of
2088 epilogue RTL. */
2089 emit_jump_insn (gen_naked_return ());
2090 return;
2091 }
2092
2093 rx_get_stack_layout (& low, & high, & register_mask,
2094 & frame_size, & stack_size);
2095
2096 total_size = frame_size + stack_size;
2097 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2098
2099 /* See if we are unable to use the special stack frame deconstruct and
2100 return instructions. In most cases we can use them, but the exceptions
2101 are:
2102
2103 - Sibling calling functions deconstruct the frame but do not return to
2104 their caller. Instead they branch to their sibling and allow their
2105 return instruction to return to this function's parent.
2106
2107 - Fast and normal interrupt handling functions have to use special
2108 return instructions.
2109
2110 - Functions where we have pushed a fragmented set of registers into the
2111 call-save area must have the same set of registers popped. */
2112 if (is_sibcall
2113 || is_fast_interrupt_func (NULL_TREE)
2114 || is_interrupt_func (NULL_TREE)
2115 || register_mask)
2116 {
2117 /* Cannot use the special instructions - deconstruct by hand. */
2118 if (total_size)
2119 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2120 GEN_INT (total_size), false);
2121
2122 if (MUST_SAVE_ACC_REGISTER)
2123 {
2124 unsigned int acc_low, acc_high;
2125
2126 /* Reverse the saving of the accumulator register onto the stack.
2127 Note we must adjust the saved "low" accumulator value as it
2128 is really the middle 32 bits of the accumulator. */
2129 if (register_mask)
2130 {
2131 acc_low = acc_high = 0;
2132
2133 for (reg = 1; reg < CC_REGNUM; reg ++)
2134 if (register_mask & (1 << reg))
2135 {
2136 if (acc_low == 0)
2137 acc_low = reg;
2138 else
2139 {
2140 acc_high = reg;
2141 break;
2142 }
2143 }
2144 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2145 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2146 }
2147 else
2148 {
2149 acc_low = low;
2150 acc_high = low + 1;
2151 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2152 gen_rx_popm_vector (acc_low, acc_high)));
2153 }
2154
2155 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2156 gen_rtx_REG (SImode, acc_low),
2157 GEN_INT (16)));
2158 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2159 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2160 }
2161
2162 if (register_mask)
2163 {
2164 for (reg = 0; reg < CC_REGNUM; reg ++)
2165 if (register_mask & (1 << reg))
2166 {
2167 low = high = reg;
2168 while (register_mask & (1 << high))
2169 high ++;
2170 pop_regs (high - 1, low);
2171 reg = high;
2172 }
2173 }
2174 else if (low)
2175 pop_regs (high, low);
2176
2177 if (is_fast_interrupt_func (NULL_TREE))
2178 {
2179 gcc_assert (! is_sibcall);
2180 emit_jump_insn (gen_fast_interrupt_return ());
2181 }
2182 else if (is_interrupt_func (NULL_TREE))
2183 {
2184 gcc_assert (! is_sibcall);
2185 emit_jump_insn (gen_exception_return ());
2186 }
2187 else if (! is_sibcall)
2188 emit_jump_insn (gen_simple_return ());
2189
2190 return;
2191 }
2192
2193 /* If we allocated space on the stack, free it now. */
2194 if (total_size)
2195 {
2196 unsigned HOST_WIDE_INT rtsd_size;
2197
2198 /* See if we can use the RTSD instruction. */
2199 rtsd_size = total_size + regs_size;
2200 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2201 {
2202 if (low)
2203 emit_jump_insn (gen_pop_and_return
2204 (GEN_INT (rtsd_size),
2205 gen_rx_rtsd_vector (rtsd_size, low, high)));
2206 else
2207 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2208
2209 return;
2210 }
2211
2212 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2213 GEN_INT (total_size), false);
2214 }
2215
2216 if (low)
2217 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2218 gen_rx_rtsd_vector (regs_size,
2219 low, high)));
2220 else
2221 emit_jump_insn (gen_simple_return ());
2222 }
2223
2224
2225 /* Compute the offset (in words) between FROM (arg pointer
2226 or frame pointer) and TO (frame pointer or stack pointer).
2227 See ASCII art comment at the start of rx_expand_prologue
2228 for more information. */
2229
2230 int
2231 rx_initial_elimination_offset (int from, int to)
2232 {
2233 unsigned int low;
2234 unsigned int high;
2235 unsigned int frame_size;
2236 unsigned int stack_size;
2237 unsigned int mask;
2238
2239 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2240
2241 if (from == ARG_POINTER_REGNUM)
2242 {
2243 /* Extend the computed size of the stack frame to
2244 include the registers pushed in the prologue. */
2245 if (low)
2246 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2247 else
2248 frame_size += bit_count (mask) * UNITS_PER_WORD;
2249
2250 /* Remember to include the return address. */
2251 frame_size += 1 * UNITS_PER_WORD;
2252
2253 if (to == FRAME_POINTER_REGNUM)
2254 return frame_size;
2255
2256 gcc_assert (to == STACK_POINTER_REGNUM);
2257 return frame_size + stack_size;
2258 }
2259
2260 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2261 return stack_size;
2262 }
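
/* A worked example (all numbers illustrative): for a function that
   saved r6..r8 (low = 6, high = 8) with frame_size = 8 and
   stack_size = 12, the offsets computed above would be:

     ARG_POINTER -> FRAME_POINTER:  8 + 3 * 4 + 4 = 24
     ARG_POINTER -> STACK_POINTER:  24 + 12       = 36
     FRAME_POINTER -> STACK_POINTER:                12

   where the final "+ 4" in the first line is the pushed return
   address. */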
2263
2264 /* Decide if a variable should go into one of the small data sections. */
2265
2266 static bool
2267 rx_in_small_data (const_tree decl)
2268 {
2269 int size;
2270 const char * section;
2271
2272 if (rx_small_data_limit == 0)
2273 return false;
2274
2275 if (TREE_CODE (decl) != VAR_DECL)
2276 return false;
2277
2278 /* We do not put read-only variables into a small data area because
2279 they would be placed with the other read-only sections, far away
2280 from the read-write data sections, and we only have one small
2281 data area pointer.
2282 Similarly, commons are placed in the .bss section, which might be
2283 far away from (and out of alignment with) the .data section. */
2284 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2285 return false;
2286
2287 section = DECL_SECTION_NAME (decl);
2288 if (section)
2289 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2290
2291 size = int_size_in_bytes (TREE_TYPE (decl));
2292
2293 return (size > 0) && (size <= rx_small_data_limit);
2294 }
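
/* For example (declarations illustrative), with -msmall-data-limit=8
   in effect:

     int counter = 1;            // read-write, 4 bytes: small data
     const int limit = 10;       // read-only: not small data
     char big_buf[256] = { 0 };  // too large: not small data
     int maybe_common;           // a common symbol: not small data

   Only 'counter' would be placed in the small data area and addressed
   relative to the small data base register. */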
2295
2296 /* Return a section for X.
2297 The only special thing we do here is to honor small data. */
2298
2299 static section *
2300 rx_select_rtx_section (machine_mode mode,
2301 rtx x,
2302 unsigned HOST_WIDE_INT align)
2303 {
2304 if (rx_small_data_limit > 0
2305 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2306 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2307 return sdata_section;
2308
2309 return default_elf_select_rtx_section (mode, x, align);
2310 }
2311
2312 static section *
2313 rx_select_section (tree decl,
2314 int reloc,
2315 unsigned HOST_WIDE_INT align)
2316 {
2317 if (rx_small_data_limit > 0)
2318 {
2319 switch (categorize_decl_for_section (decl, reloc))
2320 {
2321 case SECCAT_SDATA: return sdata_section;
2322 case SECCAT_SBSS: return sbss_section;
2323 case SECCAT_SRODATA:
2324 /* Fall through. We do not put small, read-only
2325 data into the C_2 section because we do not
2326 use the C_2 section at all: it is located
2327 with the other read-only data sections, far
2328 away from the read-write data sections, and
2329 we only have one small data pointer
2330 (r13). */
2331 default:
2332 break;
2333 }
2334 }
2335
2336 /* If we are supporting the Renesas assembler
2337 we cannot use mergeable sections. */
2338 if (TARGET_AS100_SYNTAX)
2339 switch (categorize_decl_for_section (decl, reloc))
2340 {
2341 case SECCAT_RODATA_MERGE_CONST:
2342 case SECCAT_RODATA_MERGE_STR_INIT:
2343 case SECCAT_RODATA_MERGE_STR:
2344 return readonly_data_section;
2345
2346 default:
2347 break;
2348 }
2349
2350 return default_elf_select_section (decl, reloc, align);
2351 }
2352
2353 enum rx_builtin
2354 {
2355 RX_BUILTIN_BRK,
2356 RX_BUILTIN_CLRPSW,
2357 RX_BUILTIN_INT,
2358 RX_BUILTIN_MACHI,
2359 RX_BUILTIN_MACLO,
2360 RX_BUILTIN_MULHI,
2361 RX_BUILTIN_MULLO,
2362 RX_BUILTIN_MVFACHI,
2363 RX_BUILTIN_MVFACMI,
2364 RX_BUILTIN_MVFC,
2365 RX_BUILTIN_MVTACHI,
2366 RX_BUILTIN_MVTACLO,
2367 RX_BUILTIN_MVTC,
2368 RX_BUILTIN_MVTIPL,
2369 RX_BUILTIN_RACW,
2370 RX_BUILTIN_REVW,
2371 RX_BUILTIN_RMPA,
2372 RX_BUILTIN_ROUND,
2373 RX_BUILTIN_SETPSW,
2374 RX_BUILTIN_WAIT,
2375 RX_BUILTIN_max
2376 };
2377
2378 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2379
2380 static void
2381 rx_init_builtins (void)
2382 {
2383 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2384 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2385 add_builtin_function ("__builtin_rx_" LC_NAME, \
2386 build_function_type_list (RET_TYPE##_type_node, \
2387 NULL_TREE), \
2388 RX_BUILTIN_##UC_NAME, \
2389 BUILT_IN_MD, NULL, NULL_TREE)
2390
2391 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2392 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2393 add_builtin_function ("__builtin_rx_" LC_NAME, \
2394 build_function_type_list (RET_TYPE##_type_node, \
2395 ARG_TYPE##_type_node, \
2396 NULL_TREE), \
2397 RX_BUILTIN_##UC_NAME, \
2398 BUILT_IN_MD, NULL, NULL_TREE)
2399
2400 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2401 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2402 add_builtin_function ("__builtin_rx_" LC_NAME, \
2403 build_function_type_list (RET_TYPE##_type_node, \
2404 ARG_TYPE1##_type_node,\
2405 ARG_TYPE2##_type_node,\
2406 NULL_TREE), \
2407 RX_BUILTIN_##UC_NAME, \
2408 BUILT_IN_MD, NULL, NULL_TREE)
2409
2410 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2411 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2412 add_builtin_function ("__builtin_rx_" LC_NAME, \
2413 build_function_type_list (RET_TYPE##_type_node, \
2414 ARG_TYPE1##_type_node,\
2415 ARG_TYPE2##_type_node,\
2416 ARG_TYPE3##_type_node,\
2417 NULL_TREE), \
2418 RX_BUILTIN_##UC_NAME, \
2419 BUILT_IN_MD, NULL, NULL_TREE)
2420
2421 ADD_RX_BUILTIN0 (BRK, "brk", void);
2422 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2423 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2424 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2425 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2426 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2427 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2428 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2429 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2430 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
2431 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2432 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2433 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
2434 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2435 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2436 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2437 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2438 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2439 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2440 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2441 }
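
/* A usage sketch (user code, not part of this file); the builtins
   registered above are called directly from C:

     int hi = __builtin_rx_mvfachi ();  // read accumulator high word
     __builtin_rx_mvtachi (hi + 1);     // write accumulator high word
     __builtin_rx_clrpsw ('I');         // clear the interrupt flag
     __builtin_rx_brk ();               // emit a BRK instruction

   Each call expands to the matching insn via rx_expand_builtin
   below. */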
2442
2443 /* Return the RX builtin for CODE. */
2444
2445 static tree
2446 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2447 {
2448 if (code >= RX_BUILTIN_max)
2449 return error_mark_node;
2450
2451 return rx_builtins[code];
2452 }
2453
2454 static rtx
2455 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2456 {
2457 if (reg && ! REG_P (arg))
2458 arg = force_reg (SImode, arg);
2459
2460 emit_insn (gen_func (arg));
2461
2462 return NULL_RTX;
2463 }
2464
2465 static rtx
2466 rx_expand_builtin_mvtc (tree exp)
2467 {
2468 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2469 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2470
2471 if (! CONST_INT_P (arg1))
2472 return NULL_RTX;
2473
2474 if (! REG_P (arg2))
2475 arg2 = force_reg (SImode, arg2);
2476
2477 emit_insn (gen_mvtc (arg1, arg2));
2478
2479 return NULL_RTX;
2480 }
2481
2482 static rtx
2483 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2484 {
2485 rtx arg = expand_normal (t_arg);
2486
2487 if (! CONST_INT_P (arg))
2488 return NULL_RTX;
2489
2490 if (target == NULL_RTX)
2491 return NULL_RTX;
2492
2493 if (! REG_P (target))
2494 target = force_reg (SImode, target);
2495
2496 emit_insn (gen_mvfc (target, arg));
2497
2498 return target;
2499 }
2500
2501 static rtx
2502 rx_expand_builtin_mvtipl (rtx arg)
2503 {
2504 /* The RX610 does not support the MVTIPL instruction. */
2505 if (rx_cpu_type == RX610)
2506 return NULL_RTX;
2507
2508 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2509 return NULL_RTX;
2510
2511 emit_insn (gen_mvtipl (arg));
2512
2513 return NULL_RTX;
2514 }
2515
2516 static rtx
2517 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2518 {
2519 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2520 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2521
2522 if (! REG_P (arg1))
2523 arg1 = force_reg (SImode, arg1);
2524
2525 if (! REG_P (arg2))
2526 arg2 = force_reg (SImode, arg2);
2527
2528 emit_insn (gen_func (arg1, arg2));
2529
2530 return NULL_RTX;
2531 }
2532
2533 static rtx
2534 rx_expand_int_builtin_1_arg (rtx arg,
2535 rtx target,
2536 rtx (* gen_func)(rtx, rtx),
2537 bool mem_ok)
2538 {
2539 if (! REG_P (arg))
2540 if (!mem_ok || ! MEM_P (arg))
2541 arg = force_reg (SImode, arg);
2542
2543 if (target == NULL_RTX || ! REG_P (target))
2544 target = gen_reg_rtx (SImode);
2545
2546 emit_insn (gen_func (target, arg));
2547
2548 return target;
2549 }
2550
2551 static rtx
2552 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2553 {
2554 if (target == NULL_RTX || ! REG_P (target))
2555 target = gen_reg_rtx (SImode);
2556
2557 emit_insn (gen_func (target));
2558
2559 return target;
2560 }
2561
2562 static rtx
2563 rx_expand_builtin_round (rtx arg, rtx target)
2564 {
2565 if ((! REG_P (arg) && ! MEM_P (arg))
2566 || GET_MODE (arg) != SFmode)
2567 arg = force_reg (SFmode, arg);
2568
2569 if (target == NULL_RTX || ! REG_P (target))
2570 target = gen_reg_rtx (SImode);
2571
2572 emit_insn (gen_lrintsf2 (target, arg));
2573
2574 return target;
2575 }
2576
2577 static int
2578 valid_psw_flag (rtx op, const char *which)
2579 {
2580 static int mvtc_inform_done = 0;
2581
2582 if (GET_CODE (op) == CONST_INT)
2583 switch (INTVAL (op))
2584 {
2585 case 0: case 'c': case 'C':
2586 case 1: case 'z': case 'Z':
2587 case 2: case 's': case 'S':
2588 case 3: case 'o': case 'O':
2589 case 8: case 'i': case 'I':
2590 case 9: case 'u': case 'U':
2591 return 1;
2592 }
2593
2594 error ("%<__builtin_rx_%s%> takes %<C%>, %<Z%>, %<S%>, %<O%>, %<I%>, "
2595 "or %<U%>", which);
2596 if (!mvtc_inform_done)
2597 error ("use %<__builtin_rx_mvtc%> (0, ... ) to write arbitrary values to PSW");
2598 mvtc_inform_done = 1;
2599
2600 return 0;
2601 }
2602
2603 static rtx
2604 rx_expand_builtin (tree exp,
2605 rtx target,
2606 rtx subtarget ATTRIBUTE_UNUSED,
2607 machine_mode mode ATTRIBUTE_UNUSED,
2608 int ignore ATTRIBUTE_UNUSED)
2609 {
2610 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2611 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2612 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2613 unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
2614
2615 switch (fcode)
2616 {
2617 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2618 case RX_BUILTIN_CLRPSW:
2619 if (!valid_psw_flag (op, "clrpsw"))
2620 return NULL_RTX;
2621 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2622 case RX_BUILTIN_SETPSW:
2623 if (!valid_psw_flag (op, "setpsw"))
2624 return NULL_RTX;
2625 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2626 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2627 (op, gen_int, false);
2628 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2629 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2630 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2631 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2632 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2633 (target, gen_mvfachi);
2634 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2635 (target, gen_mvfacmi);
2636 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2637 (op, gen_mvtachi, true);
2638 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2639 (op, gen_mvtaclo, true);
2640 case RX_BUILTIN_RMPA:
2641 if (rx_allow_string_insns)
2642 emit_insn (gen_rmpa ());
2643 else
2644 error ("%<-mno-allow-string-insns%> forbids the generation "
2645 "of the RMPA instruction");
2646 return NULL_RTX;
2647 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2648 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2649 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2650 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2651 (op, gen_racw, false);
2652 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2653 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2654 (op, target, gen_revw, false);
2655 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2656
2657 default:
2658 internal_error ("bad builtin code");
2659 break;
2660 }
2661
2662 return NULL_RTX;
2663 }
2664
2665 /* Place an element into a constructor or destructor section.
2666 Like default_ctor_section_asm_out_constructor in varasm.c
2667 except that it uses .init_array (or .fini_array) and it
2668 handles constructor priorities. */
2669
2670 static void
2671 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2672 {
2673 section * s;
2674
2675 if (priority != DEFAULT_INIT_PRIORITY)
2676 {
2677 char buf[18];
2678
2679 sprintf (buf, "%s.%.5u",
2680 is_ctor ? ".init_array" : ".fini_array",
2681 priority);
2682 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2683 }
2684 else if (is_ctor)
2685 s = ctors_section;
2686 else
2687 s = dtors_section;
2688
2689 switch_to_section (s);
2690 assemble_align (POINTER_SIZE);
2691 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2692 }
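
/* For instance (exact directives depend on the assembler in use), a
   constructor with priority 101 would be emitted roughly as:

     .section .init_array.00101
     .balign  4
     .long    _my_ctor

   since "%.5u" zero-pads the priority to five digits, so that the
   linker's name-sorted section order matches priority order. */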
2693
2694 static void
2695 rx_elf_asm_constructor (rtx symbol, int priority)
2696 {
2697 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2698 }
2699
2700 static void
2701 rx_elf_asm_destructor (rtx symbol, int priority)
2702 {
2703 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2704 }
2705
2706 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2707
2708 static tree
2709 rx_handle_func_attribute (tree * node,
2710 tree name,
2711 tree args ATTRIBUTE_UNUSED,
2712 int flags ATTRIBUTE_UNUSED,
2713 bool * no_add_attrs)
2714 {
2715 gcc_assert (DECL_P (* node));
2716
2717 if (TREE_CODE (* node) != FUNCTION_DECL)
2718 {
2719 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2720 name);
2721 * no_add_attrs = true;
2722 }
2723
2724 /* FIXME: We ought to check for conflicting attributes. */
2725
2726 /* FIXME: We ought to check that the interrupt and exception
2727 handler attributes have been applied to void functions. */
2728 return NULL_TREE;
2729 }
2730
2731 /* Check "vector" attribute. */
2732
2733 static tree
2734 rx_handle_vector_attribute (tree * node,
2735 tree name,
2736 tree args,
2737 int flags ATTRIBUTE_UNUSED,
2738 bool * no_add_attrs)
2739 {
2740 gcc_assert (DECL_P (* node));
2741 gcc_assert (args != NULL_TREE);
2742
2743 if (TREE_CODE (* node) != FUNCTION_DECL)
2744 {
2745 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2746 name);
2747 * no_add_attrs = true;
2748 }
2749
2750 return NULL_TREE;
2751 }
2752
2753 /* Table of RX specific attributes. */
2754 const struct attribute_spec rx_attribute_table[] =
2755 {
2756 /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
2757 affects_type_identity, handler, exclude. */
2758 { "fast_interrupt", 0, 0, true, false, false, false,
2759 rx_handle_func_attribute, NULL },
2760 { "interrupt", 0, -1, true, false, false, false,
2761 rx_handle_func_attribute, NULL },
2762 { "naked", 0, 0, true, false, false, false,
2763 rx_handle_func_attribute, NULL },
2764 { "vector", 1, -1, true, false, false, false,
2765 rx_handle_vector_attribute, NULL },
2766 { NULL, 0, 0, false, false, false, false, NULL, NULL }
2767 };
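
/* A usage sketch (vector number illustrative) showing how these
   attributes appear in user code:

     void handler (void) __attribute__((interrupt));
     void quick (void) __attribute__((fast_interrupt));
     void bare (void) __attribute__((naked));
     void timer_isr (void) __attribute__((interrupt, vector (5)));

   "vector" requires at least one argument; "fast_interrupt" and
   "naked" take none, while "interrupt" accepts optional arguments. */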
2768
2769 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2770
2771 static void
2772 rx_override_options_after_change (void)
2773 {
2774 static bool first_time = TRUE;
2775
2776 if (first_time)
2777 {
2778 /* If this is the first time through and the user has not disabled
2779 the use of RX FPU hardware then enable -ffinite-math-only,
2780 since the FPU instructions do not support NaNs and infinities. */
2781 if (TARGET_USE_FPU)
2782 flag_finite_math_only = 1;
2783
2784 first_time = FALSE;
2785 }
2786 else
2787 {
2788 /* Alert the user if they are changing the optimization options
2789 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2790 if (TARGET_USE_FPU
2791 && !flag_finite_math_only)
2792 warning (0, "RX FPU instructions do not support NaNs and infinities");
2793 }
2794 }
2795
2796 static void
2797 rx_option_override (void)
2798 {
2799 unsigned int i;
2800 cl_deferred_option *opt;
2801 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2802
2803 if (v)
2804 FOR_EACH_VEC_ELT (*v, i, opt)
2805 {
2806 switch (opt->opt_index)
2807 {
2808 case OPT_mint_register_:
2809 switch (opt->value)
2810 {
2811 case 4:
2812 fixed_regs[10] = call_used_regs [10] = 1;
2813 /* Fall through. */
2814 case 3:
2815 fixed_regs[11] = call_used_regs [11] = 1;
2816 /* Fall through. */
2817 case 2:
2818 fixed_regs[12] = call_used_regs [12] = 1;
2819 /* Fall through. */
2820 case 1:
2821 fixed_regs[13] = call_used_regs [13] = 1;
2822 /* Fall through. */
2823 case 0:
2824 rx_num_interrupt_regs = opt->value;
2825 break;
2826 default:
2827 rx_num_interrupt_regs = 0;
2828 /* Error message already given because rx_handle_option
2829 returned false. */
2830 break;
2831 }
2832 break;
2833
2834 default:
2835 gcc_unreachable ();
2836 }
2837 }
2838
2839 /* This target defaults to strict volatile bitfields. */
2840 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2841 flag_strict_volatile_bitfields = 1;
2842
2843 rx_override_options_after_change ();
2844
2845 /* These values are bytes, not log. */
2846 if (! optimize_size)
2847 {
2848 if (flag_align_jumps && !str_align_jumps)
2849 str_align_jumps = ((rx_cpu_type == RX100
2850 || rx_cpu_type == RX200) ? "4" : "8");
2851 if (flag_align_loops && !str_align_loops)
2852 str_align_loops = ((rx_cpu_type == RX100
2853 || rx_cpu_type == RX200) ? "4" : "8");
2854 if (flag_align_labels && !str_align_labels)
2855 str_align_labels = ((rx_cpu_type == RX100
2856 || rx_cpu_type == RX200) ? "4" : "8");
2857 }
2858 }
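
/* By way of example (invocation illustrative): compiling with

     rx-elf-gcc -O2 -mint-register=2 foo.c

   reserves r12 and r13 (case 2 above falls through to case 1),
   marking both as fixed and call-used so that fast interrupt
   handlers may use them without saving them first. */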
2859
2860
2861 static bool
2862 rx_allocate_stack_slots_for_args (void)
2863 {
2864 /* Naked functions should not allocate stack slots for arguments. */
2865 return ! is_naked_func (NULL_TREE);
2866 }
2867
2868 static bool
2869 rx_func_attr_inlinable (const_tree decl)
2870 {
2871 return ! is_fast_interrupt_func (decl)
2872 && ! is_interrupt_func (decl)
2873 && ! is_naked_func (decl);
2874 }
2875
2876 static bool
2877 rx_warn_func_return (tree decl)
2878 {
2879 /* Naked functions are implemented entirely in assembly, including the
2880 return sequence, so suppress warnings about this. */
2881 return !is_naked_func (decl);
2882 }
2883
2884 /* Return nonzero if it is OK to make a tail-call to DECL, which is
2885 either a function_decl, or NULL if this is an indirect call via EXP. */
2886
2887 static bool
2888 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2889 {
2890 if (TARGET_JSR)
2891 return false;
2892
2893 /* Do not allow indirect tailcalls. The
2894 sibcall patterns do not support them. */
2895 if (decl == NULL)
2896 return false;
2897
2898 /* Never tailcall from inside interrupt handlers or naked functions. */
2899 if (is_fast_interrupt_func (NULL_TREE)
2900 || is_interrupt_func (NULL_TREE)
2901 || is_naked_func (NULL_TREE))
2902 return false;
2903
2904 return true;
2905 }
2906
2907 static void
2908 rx_file_start (void)
2909 {
2910 if (! TARGET_AS100_SYNTAX)
2911 default_file_start ();
2912 }
2913
2914 static bool
2915 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2916 {
2917 /* The packed attribute overrides the MS behavior. */
2918 return ! TYPE_PACKED (record_type);
2919 }
2920
2921 /* Returns true if X is a legitimate constant for an immediate
2922 operand on the RX. X is already known to satisfy CONSTANT_P. */
2923
2924 bool
2925 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2926 {
2927 switch (GET_CODE (x))
2928 {
2929 case CONST:
2930 x = XEXP (x, 0);
2931
2932 if (GET_CODE (x) == PLUS)
2933 {
2934 if (! CONST_INT_P (XEXP (x, 1)))
2935 return false;
2936
2937 /* GCC would not pass us CONST_INT + CONST_INT so we
2938 know that we have {SYMBOL|LABEL} + CONST_INT. */
2939 x = XEXP (x, 0);
2940 gcc_assert (! CONST_INT_P (x));
2941 }
2942
2943 switch (GET_CODE (x))
2944 {
2945 case LABEL_REF:
2946 case SYMBOL_REF:
2947 return true;
2948
2949 case UNSPEC:
2950 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2951
2952 default:
2953 /* FIXME: Can this ever happen? */
2954 gcc_unreachable ();
2955 }
2956 break;
2957
2958 case LABEL_REF:
2959 case SYMBOL_REF:
2960 return true;
2961 case CONST_DOUBLE:
2962 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2963 case CONST_VECTOR:
2964 return false;
2965 default:
2966 gcc_assert (CONST_INT_P (x));
2967 break;
2968 }
2969
2970 return ok_for_max_constant (INTVAL (x));
2971 }
2972
2973 static int
2974 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2975 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2976 {
2977 rtx a, b;
2978
2979 if (GET_CODE (addr) != PLUS)
2980 return COSTS_N_INSNS (1);
2981
2982 a = XEXP (addr, 0);
2983 b = XEXP (addr, 1);
2984
2985 if (REG_P (a) && REG_P (b))
2986 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2987 return COSTS_N_INSNS (4);
2988
2989 if (speed)
2990 /* [REG+OFF] is just as fast as [REG]. */
2991 return COSTS_N_INSNS (1);
2992
2993 if (CONST_INT_P (b)
2994 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2995 /* Try to discourage REG + <large OFF> when optimizing for size. */
2996 return COSTS_N_INSNS (2);
2997
2998 return COSTS_N_INSNS (1);
2999 }
3000
3001 static bool
3002 rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
3003 int opno ATTRIBUTE_UNUSED, int* total, bool speed)
3004 {
3005 if (x == const0_rtx)
3006 {
3007 *total = 0;
3008 return true;
3009 }
3010
3011 switch (GET_CODE (x))
3012 {
3013 case MULT:
3014 if (mode == DImode)
3015 {
3016 *total = COSTS_N_INSNS (2);
3017 return true;
3018 }
3019 /* fall through */
3020
3021 case PLUS:
3022 case MINUS:
3023 case AND:
3024 case COMPARE:
3025 case IOR:
3026 case XOR:
3027 *total = COSTS_N_INSNS (1);
3028 return true;
3029
3030 case DIV:
3031 if (speed)
3032 /* This is the worst case for a division. Pessimize divisions when
3033 not optimizing for size and allow reciprocal optimizations which
3034 produce bigger code. */
3035 *total = COSTS_N_INSNS (20);
3036 else
3037 *total = COSTS_N_INSNS (3);
3038 return true;
3039
3040 case UDIV:
3041 if (speed)
3042 /* This is the worst case for a division. Pessimize divisions when
3043 not optimizing for size and allow reciprocal optimizations which
3044 produce bigger code. */
3045 *total = COSTS_N_INSNS (18);
3046 else
3047 *total = COSTS_N_INSNS (3);
3048 return true;
3049
3050 default:
3051 break;
3052 }
3053
3054 return false;
3055 }
3056
3057 static bool
3058 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3059 {
3060 /* We can always eliminate to the frame pointer.
3061 We can eliminate to the stack pointer unless a frame
3062 pointer is needed. */
3063
3064 return to == FRAME_POINTER_REGNUM
3065 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
3066 }
3067
3068
3069 static void
3070 rx_trampoline_template (FILE * file)
3071 {
3072 /* Output assembler code for a block containing the constant
3073 part of a trampoline, leaving space for the variable parts.
3074
3075 On the RX, (where r8 is the static chain regnum) the trampoline
3076 looks like:
3077
3078 mov #<static chain value>, r8
3079 mov #<function's address>, r9
3080 jmp r9
3081
3082 In big-endian-data mode, however, instructions are read into the CPU
3083 4 bytes at a time. These bytes are then swapped around before being
3084 passed to the decoder. So we must partition our trampoline into
3085 4-byte packets and swap these packets around so that the instruction
3086 reader will reverse the process. But, in order to avoid splitting
3087 the 32-bit constants across these packet boundaries (which would make
3088 inserting them into the constructed trampoline very difficult), we
3089 have to pad the instruction sequence with NOP insns, i.e.:
3090
3091 nop
3092 nop
3093 mov.l #<...>, r8
3094 nop
3095 nop
3096 mov.l #<...>, r9
3097 jmp r9
3098 nop
3099 nop */
3100
3101 if (! TARGET_BIG_ENDIAN_DATA)
3102 {
3103 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3104 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3105 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3106 }
3107 else
3108 {
3109 char r8 = '0' + STATIC_CHAIN_REGNUM;
3110 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3111
3112 if (TARGET_AS100_SYNTAX)
3113 {
3114 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3115 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3116 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3117 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3118 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3119 }
3120 else
3121 {
3122 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3123 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3124 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3125 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3126 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3127 }
3128 }
3129 }
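
/* For context, a trampoline is only needed when the address of a
   GNU C nested function escapes, e.g. (illustrative user code;
   'consume' is a hypothetical callee):

     void outer (int x)
     {
       int inner (int y) { return x + y; }  // needs the static chain
       consume (inner);                     // taking the address
     }                                      // forces a trampoline

   rx_trampoline_init below copies the template onto the stack and
   patches in the static chain value and the function address. */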
3130
3131 static void
3132 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3133 {
3134 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3135
3136 emit_block_move (tramp, assemble_trampoline_template (),
3137 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3138
3139 if (TARGET_BIG_ENDIAN_DATA)
3140 {
3141 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3142 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3143 }
3144 else
3145 {
3146 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3147 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3148 }
3149 }
3150
3151 static int
3152 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3153 reg_class_t regclass ATTRIBUTE_UNUSED,
3154 bool in)
3155 {
3156 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3157 }
3158
3159 /* Convert a CC_MODE to the set of flags that it represents. */
3160
3161 static unsigned int
3162 flags_from_mode (machine_mode mode)
3163 {
3164 switch (mode)
3165 {
3166 case E_CC_ZSmode:
3167 return CC_FLAG_S | CC_FLAG_Z;
3168 case E_CC_ZSOmode:
3169 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3170 case E_CC_ZSCmode:
3171 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3172 case E_CCmode:
3173 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3174 case E_CC_Fmode:
3175 return CC_FLAG_FP;
3176 default:
3177 gcc_unreachable ();
3178 }
3179 }
3180
3181 /* Convert a set of flags to a CC_MODE that can implement it. */
3182
3183 static machine_mode
3184 mode_from_flags (unsigned int f)
3185 {
3186 if (f & CC_FLAG_FP)
3187 return CC_Fmode;
3188 if (f & CC_FLAG_O)
3189 {
3190 if (f & CC_FLAG_C)
3191 return CCmode;
3192 else
3193 return CC_ZSOmode;
3194 }
3195 else if (f & CC_FLAG_C)
3196 return CC_ZSCmode;
3197 else
3198 return CC_ZSmode;
3199 }
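
/* To illustrate the round trip (see flags_from_code below): a signed
   GT comparison needs S, O and Z, and
   mode_from_flags (CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z) returns
   CC_ZSOmode; an unsigned LEU comparison needs C and Z, which maps
   to CC_ZSCmode. */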
3200
3201 /* Convert an RTX_CODE to the set of flags needed to implement it.
3202 This assumes an integer comparison. */
3203
3204 static unsigned int
3205 flags_from_code (enum rtx_code code)
3206 {
3207 switch (code)
3208 {
3209 case LT:
3210 case GE:
3211 return CC_FLAG_S;
3212 case GT:
3213 case LE:
3214 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3215 case GEU:
3216 case LTU:
3217 return CC_FLAG_C;
3218 case GTU:
3219 case LEU:
3220 return CC_FLAG_C | CC_FLAG_Z;
3221 case EQ:
3222 case NE:
3223 return CC_FLAG_Z;
3224 default:
3225 gcc_unreachable ();
3226 }
3227 }
3228
3229 /* Return a CC_MODE of which both M1 and M2 are subsets. */
3230
3231 static machine_mode
3232 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3233 {
3234 unsigned f;
3235
3236 /* Early out for identical modes. */
3237 if (m1 == m2)
3238 return m1;
3239
3240 /* There's no valid combination for FP vs non-FP. */
3241 f = flags_from_mode (m1) | flags_from_mode (m2);
3242 if (f & CC_FLAG_FP)
3243 return VOIDmode;
3244
3245 /* Otherwise, see what mode can implement all the flags. */
3246 return mode_from_flags (f);
3247 }
3248
3249 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3250
3251 machine_mode
3252 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3253 {
3254 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3255 return CC_Fmode;
3256
3257 if (y != const0_rtx)
3258 return CCmode;
3259
3260 return mode_from_flags (flags_from_code (cmp_code));
3261 }
3262
3263 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3264 CC_MODE, then emit a branch that tests the result of that compare. */
3265
3266 void
3267 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3268 rtx c1, rtx c2, rtx label)
3269 {
3270 rtx flags, x;
3271
3272 flags = gen_rtx_REG (cc_mode, CC_REG);
3273 x = gen_rtx_COMPARE (cc_mode, c1, c2);
3274 x = gen_rtx_SET (flags, x);
3275 emit_insn (x);
3276
3277 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3278 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3279 x = gen_rtx_SET (pc_rtx, x);
3280 emit_jump_insn (x);
3281 }
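
/* Sketch of the RTL emitted here (modes and operands illustrative):

     (set (reg:CC_ZS cc) (compare:CC_ZS (reg:SI r1) (const_int 0)))
     (set (pc) (if_then_else (eq (reg:CC_ZS cc) (const_int 0))
                             (label_ref L) (pc)))

   i.e. one SET of the flags register followed by a conditional
   branch that tests those flags. */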
3282
3283 /* A helper function for matching parallels that set the flags. */
3284
3285 bool
3286 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3287 {
3288 rtx op1, flags;
3289 machine_mode flags_mode;
3290
3291 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3292
3293 op1 = XVECEXP (PATTERN (insn), 0, 0);
3294 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3295
3296 flags = SET_DEST (op1);
3297 flags_mode = GET_MODE (flags);
3298
3299 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3300 return false;
3301 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3302 return false;
3303
3304 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3305 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3306 return false;
3307
3308 return true;
3309 }
3310
3311
3312 static int
3313 rx_max_skip_for_label (rtx_insn *lab)
3314 {
3315 int opsize;
3316 rtx_insn *op;
3317
3318 if (optimize_size)
3319 return 0;
3320
3321 if (lab == NULL)
3322 return 0;
3323
3324 op = lab;
3325 do
3326 {
3327 op = next_nonnote_nondebug_insn (op);
3328 }
3329 while (op && (LABEL_P (op)
3330 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3331 if (!op)
3332 return 0;
3333
3334 opsize = get_attr_length (op);
3335 if (opsize >= 0 && opsize < 8)
3336 return MAX (0, opsize - 1);
3337 return 0;
3338 }
3339
3340 static int
3341 rx_align_log_for_label (rtx_insn *lab, int uses_threshold)
3342 {
3343 /* This is a simple heuristic to guess when an alignment would not be useful
3344 because the delay due to the inserted NOPs would be greater than the delay
3345 due to the misaligned branch. If uses_threshold is zero then the alignment
3346 is always useful. */
3347 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3348 return 0;
3349
3350 if (optimize_size)
3351 return 0;
3352
3353 /* Return zero if max_skip is not a positive number. */
3354 int max_skip = rx_max_skip_for_label (lab);
3355 if (max_skip <= 0)
3356 return 0;
3357
3358 /* These values are log, not bytes. */
3359 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3360 return 2; /* 4 bytes */
3361 return 3; /* 8 bytes */
3362 }
3363
3364 align_flags
3365 rx_align_for_label (rtx_insn *lab, int uses_threshold)
3366 {
3367 return align_flags (rx_align_log_for_label (lab, uses_threshold),
3368 rx_max_skip_for_label (lab));
3369 }
3370
3371 /* Compute the real length of the extending load-and-op instructions. */
3372
3373 int
3374 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3375 {
3376 rtx extend, mem, offset;
3377 bool zero;
3378 int factor;
3379
3380 if (!INSN_P (insn))
3381 return current_length;
3382
3383 switch (INSN_CODE (insn))
3384 {
3385 default:
3386 return current_length;
3387
3388 case CODE_FOR_plussi3_zero_extendhi:
3389 case CODE_FOR_andsi3_zero_extendhi:
3390 case CODE_FOR_iorsi3_zero_extendhi:
3391 case CODE_FOR_xorsi3_zero_extendhi:
3392 case CODE_FOR_divsi3_zero_extendhi:
3393 case CODE_FOR_udivsi3_zero_extendhi:
3394 case CODE_FOR_minussi3_zero_extendhi:
3395 case CODE_FOR_smaxsi3_zero_extendhi:
3396 case CODE_FOR_sminsi3_zero_extendhi:
3397 case CODE_FOR_multsi3_zero_extendhi:
3398 case CODE_FOR_comparesi3_zero_extendhi:
3399 zero = true;
3400 factor = 2;
3401 break;
3402
3403 case CODE_FOR_plussi3_sign_extendhi:
3404 case CODE_FOR_andsi3_sign_extendhi:
3405 case CODE_FOR_iorsi3_sign_extendhi:
3406 case CODE_FOR_xorsi3_sign_extendhi:
3407 case CODE_FOR_divsi3_sign_extendhi:
3408 case CODE_FOR_udivsi3_sign_extendhi:
3409 case CODE_FOR_minussi3_sign_extendhi:
3410 case CODE_FOR_smaxsi3_sign_extendhi:
3411 case CODE_FOR_sminsi3_sign_extendhi:
3412 case CODE_FOR_multsi3_sign_extendhi:
3413 case CODE_FOR_comparesi3_sign_extendhi:
3414 zero = false;
3415 factor = 2;
3416 break;
3417
3418 case CODE_FOR_plussi3_zero_extendqi:
3419 case CODE_FOR_andsi3_zero_extendqi:
3420 case CODE_FOR_iorsi3_zero_extendqi:
3421 case CODE_FOR_xorsi3_zero_extendqi:
3422 case CODE_FOR_divsi3_zero_extendqi:
3423 case CODE_FOR_udivsi3_zero_extendqi:
3424 case CODE_FOR_minussi3_zero_extendqi:
3425 case CODE_FOR_smaxsi3_zero_extendqi:
3426 case CODE_FOR_sminsi3_zero_extendqi:
3427 case CODE_FOR_multsi3_zero_extendqi:
3428 case CODE_FOR_comparesi3_zero_extendqi:
3429 zero = true;
3430 factor = 1;
3431 break;
3432
3433 case CODE_FOR_plussi3_sign_extendqi:
3434 case CODE_FOR_andsi3_sign_extendqi:
3435 case CODE_FOR_iorsi3_sign_extendqi:
3436 case CODE_FOR_xorsi3_sign_extendqi:
3437 case CODE_FOR_divsi3_sign_extendqi:
3438 case CODE_FOR_udivsi3_sign_extendqi:
3439 case CODE_FOR_minussi3_sign_extendqi:
3440 case CODE_FOR_smaxsi3_sign_extendqi:
3441 case CODE_FOR_sminsi3_sign_extendqi:
3442 case CODE_FOR_multsi3_sign_extendqi:
3443 case CODE_FOR_comparesi3_sign_extendqi:
3444 zero = false;
3445 factor = 1;
3446 break;
3447 }
3448
3449 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3450 extend = single_set (insn);
3451 gcc_assert (extend != NULL_RTX);
3452
3453 extend = SET_SRC (extend);
3454 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3455 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3456 extend = XEXP (extend, 0);
3457 else
3458 extend = XEXP (extend, 1);
3459
3460 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3461 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3462
3463 mem = XEXP (extend, 0);
3464 gcc_checking_assert (MEM_P (mem));
3465 if (REG_P (XEXP (mem, 0)))
3466 return (zero && factor == 1) ? 2 : 3;
3467
3468 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3469 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3470 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3471
3472 offset = XEXP (XEXP (mem, 0), 1);
3473 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3474
3475 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3476 return (zero && factor == 1) ? 3 : 4;
3477
3478 return (zero && factor == 1) ? 4 : 5;
3479 }
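
/* A worked example (offsets illustrative): for an insn matching
   andsi3_zero_extendqi (zero = true, factor = 1), a memory operand
   of the form (mem:QI (reg)) gives a length of 2 bytes,
   (mem:QI (plus (reg) (const_int 200))) gives 3 bytes, and an
   offset above 255 gives 4 bytes, per the returns above. */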
3480
3481 static bool
3482 rx_narrow_volatile_bitfield (void)
3483 {
3484 return true;
3485 }
3486
3487 static bool
3488 rx_ok_to_inline (tree caller, tree callee)
3489 {
3490 /* Do not inline functions with local variables
3491 into a naked CALLER - naked functions have no stack frame and
3492 locals need a frame in order to have somewhere to live.
3493
3494 Unfortunately we have no way to determine the presence of
3495 local variables in CALLEE, so we have to be cautious and
3496 assume that there might be some there.
3497
3498 We do allow inlining when CALLEE has the "inline" type
3499 modifier or the "always_inline" or "gnu_inline" attributes. */
3500 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3501 || DECL_DECLARED_INLINE_P (callee)
3502 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3503 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3504 }
3505
3506 static bool
3507 rx_enable_lra (void)
3508 {
3509 return TARGET_ENABLE_LRA;
3510 }
3511
3512 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3513 {
3514 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3515 {
3516 /* If we are inside an interrupt handler, assume that interrupts are
3517 off -- which is the default hardware behavior. In this case, there
3518 is no need to disable the interrupts. */
3519 m_prev_psw_reg = NULL;
3520 }
3521 else
3522 {
3523 m_prev_psw_reg = gen_reg_rtx (SImode);
3524 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3525 emit_insn (gen_clrpsw (GEN_INT ('I')));
3526 }
3527 }
3528
3529 rx_atomic_sequence::~rx_atomic_sequence (void)
3530 {
3531 if (m_prev_psw_reg != NULL)
3532 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3533 }
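
/* Typical use is as a RAII guard around insns that must not be
   interrupted, e.g. (sketch):

     {
       rx_atomic_sequence seq (current_function_decl);
       // ... emit insns that must run with interrupts disabled ...
     }   // the destructor restores the saved PSW via mvtc

   Inside interrupt handlers the guard emits nothing, since
   interrupts are already off. */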
3534
3535 /* Given an insn and a reg number, tell whether the reg dies or is unused
3536 after the insn. */
3537 bool
3538 rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
3539 {
3540 return find_regno_note (i, REG_DEAD, regno) != NULL
3541 || find_regno_note (i, REG_UNUSED, regno) != NULL;
3542 }
3543
3544 /* Copy dead and unused notes from SRC to DST for the specified REGNO. */
3545 void
3546 rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src, rtx_insn* dst)
3547 {
3548 int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);
3549
3550 if (rtx note = find_regno_note (src, REG_DEAD, regno))
3551 add_shallow_copy_of_reg_note (dst, note);
3552
3553 if (rtx note = find_regno_note (src, REG_UNUSED, regno))
3554 add_shallow_copy_of_reg_note (dst, note);
3555 }
3556
3557 /* Try to fuse the current bit-operation insn with the surrounding memory load
3558 and store. */
3559 bool
3560 rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
3561 rtx (*gen_insn)(rtx, rtx))
3562 {
3563 rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];
3564
3565 set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
3566 prev_nonnote_nondebug_insn_bb);
3567 if (op2_def.set_src == NULL_RTX
3568 || !MEM_P (op2_def.set_src)
3569 || GET_MODE (op2_def.set_src) != QImode
3570 || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
3571 GET_MODE (op2_def.set_src))
3572 || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
3573 || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg))
3574 )
3575 return false;
3576
3577 /* The register operand originates from a memory load and the memory load
3578 could be fused with the bitop insn.
3579 Look for the following memory store with the same memory operand. */
3580 rtx mem = op2_def.set_src;
3581
3582 /* If the memory is an auto-mod address, it can't be fused. */
3583 if (GET_CODE (XEXP (mem, 0)) == POST_INC
3584 || GET_CODE (XEXP (mem, 0)) == PRE_INC
3585 || GET_CODE (XEXP (mem, 0)) == POST_DEC
3586 || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
3587 return false;
3588
3589 rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
3590 next_nonnote_nondebug_insn_bb);
3591 if (op0_use == NULL
3592 || !(GET_CODE (PATTERN (op0_use)) == SET
3593 && RX_REG_P (XEXP (PATTERN (op0_use), 1))
3594 && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
3595 && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
3596 || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
3597 || reg_set_between_p (operands[2], curr_insn, op0_use))
3598 return false;
3599
3600 /* If the load-modify-store operation is fused it could potentially modify
3601 load/store ordering if there are other memory accesses between the load
3602 and the store for this insn. If there are volatile mems between the load
3603 and store it's better not to change the ordering. If there is a call
3604 between the load and store, it's also not safe to fuse it. */
3605 for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
3606 i != NULL && i != op0_use;
3607 i = next_nonnote_nondebug_insn_bb (i))
3608 if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
3609 return false;
3610
3611 emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
3612 set_insn_deleted (op2_def.insn);
3613 set_insn_deleted (op0_use);
3614 return true;
3615 }
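
/* Schematically (register numbers and the IOR operation are
   illustrative), the fusion replaces this three-insn sequence:

     (set (reg:QI r5) (mem:QI (reg:SI r4)))       ; load
     (set (reg:QI r5) (ior:QI (reg:QI r5) ...))   ; bitop
     (set (mem:QI (reg:SI r4)) (reg:QI r5))       ; store

   with a single memory-destination insn built by the GEN_INSN
   callback from the QImode memory and the low part of operands[1],
   then deletes the now-dead load and store. */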
3616
3617 /* Implement TARGET_HARD_REGNO_NREGS. */
3618
3619 static unsigned int
3620 rx_hard_regno_nregs (unsigned int, machine_mode mode)
3621 {
3622 return CLASS_MAX_NREGS (0, mode);
3623 }
3624
3625 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3626
3627 static bool
3628 rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3629 {
3630 return REGNO_REG_CLASS (regno) == GR_REGS;
3631 }
3632
3633 /* Implement TARGET_MODES_TIEABLE_P. */
3634
3635 static bool
3636 rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3637 {
3638 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
3639 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
3640 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
3641 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
3642 }
3643
3644 #undef TARGET_NARROW_VOLATILE_BITFIELD
3645 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3646
3647 #undef TARGET_CAN_INLINE_P
3648 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3649
3650 #undef TARGET_FUNCTION_VALUE
3651 #define TARGET_FUNCTION_VALUE rx_function_value
3652
3653 #undef TARGET_RETURN_IN_MSB
3654 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3655
3656 #undef TARGET_IN_SMALL_DATA_P
3657 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3658
3659 #undef TARGET_RETURN_IN_MEMORY
3660 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3661
3662 #undef TARGET_HAVE_SRODATA_SECTION
3663 #define TARGET_HAVE_SRODATA_SECTION true
3664
3665 #undef TARGET_ASM_SELECT_RTX_SECTION
3666 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3667
3668 #undef TARGET_ASM_SELECT_SECTION
3669 #define TARGET_ASM_SELECT_SECTION rx_select_section
3670
3671 #undef TARGET_INIT_BUILTINS
3672 #define TARGET_INIT_BUILTINS rx_init_builtins
3673
3674 #undef TARGET_BUILTIN_DECL
3675 #define TARGET_BUILTIN_DECL rx_builtin_decl
3676
3677 #undef TARGET_EXPAND_BUILTIN
3678 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3679
3680 #undef TARGET_ASM_CONSTRUCTOR
3681 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3682
3683 #undef TARGET_ASM_DESTRUCTOR
3684 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3685
3686 #undef TARGET_STRUCT_VALUE_RTX
3687 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3688
3689 #undef TARGET_ATTRIBUTE_TABLE
3690 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3691
3692 #undef TARGET_ASM_FILE_START
3693 #define TARGET_ASM_FILE_START rx_file_start
3694
3695 #undef TARGET_MS_BITFIELD_LAYOUT_P
3696 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3697
3698 #undef TARGET_LEGITIMATE_ADDRESS_P
3699 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3700
3701 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3702 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3703
3704 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3705 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3706
3707 #undef TARGET_ASM_FUNCTION_PROLOGUE
3708 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3709
3710 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3711 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3712
3713 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3714 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3715
3716 #undef TARGET_FUNCTION_ARG
3717 #define TARGET_FUNCTION_ARG rx_function_arg
3718
3719 #undef TARGET_FUNCTION_ARG_ADVANCE
3720 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3721
3722 #undef TARGET_FUNCTION_ARG_BOUNDARY
3723 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3724
3725 #undef TARGET_SET_CURRENT_FUNCTION
3726 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3727
3728 #undef TARGET_ASM_INTEGER
3729 #define TARGET_ASM_INTEGER rx_assemble_integer
3730
3731 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3732 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3733
3734 #undef TARGET_MAX_ANCHOR_OFFSET
3735 #define TARGET_MAX_ANCHOR_OFFSET 32
3736
3737 #undef TARGET_ADDRESS_COST
3738 #define TARGET_ADDRESS_COST rx_address_cost
3739
3740 #undef TARGET_CAN_ELIMINATE
3741 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3742
3743 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3744 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3745
3746 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3747 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3748
3749 #undef TARGET_TRAMPOLINE_INIT
3750 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3751
3752 #undef TARGET_PRINT_OPERAND
3753 #define TARGET_PRINT_OPERAND rx_print_operand
3754
3755 #undef TARGET_PRINT_OPERAND_ADDRESS
3756 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3757
3758 #undef TARGET_CC_MODES_COMPATIBLE
3759 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3760
3761 #undef TARGET_MEMORY_MOVE_COST
3762 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3763
3764 #undef TARGET_OPTION_OVERRIDE
3765 #define TARGET_OPTION_OVERRIDE rx_option_override
3766
3767 #undef TARGET_PROMOTE_FUNCTION_MODE
3768 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3769
3770 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3771 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3772
3773 #undef TARGET_FLAGS_REGNUM
3774 #define TARGET_FLAGS_REGNUM CC_REG
3775
3776 #undef TARGET_LEGITIMATE_CONSTANT_P
3777 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3778
3779 #undef TARGET_LEGITIMIZE_ADDRESS
3780 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3781
3782 #undef TARGET_WARN_FUNC_RETURN
3783 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3784
3785 #undef TARGET_LRA_P
3786 #define TARGET_LRA_P rx_enable_lra
3787
3788 #undef TARGET_HARD_REGNO_NREGS
3789 #define TARGET_HARD_REGNO_NREGS rx_hard_regno_nregs
3790 #undef TARGET_HARD_REGNO_MODE_OK
3791 #define TARGET_HARD_REGNO_MODE_OK rx_hard_regno_mode_ok
3792
3793 #undef TARGET_MODES_TIEABLE_P
3794 #define TARGET_MODES_TIEABLE_P rx_modes_tieable_p
3795
3796 #undef TARGET_RTX_COSTS
3797 #define TARGET_RTX_COSTS rx_rtx_costs
3798
3799 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
3800 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
3801
3802 struct gcc_target targetm = TARGET_INITIALIZER;
3803
3804 #include "gt-rx.h"
3805