1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "toplev.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44
/* String equality helper; strcmp returns 0 on a match.  */
#ifndef streq
#define streq(a,b) (strcmp (a, b) == 0)
#endif

/* Function prototypes for stupid compilers: */
static void const_double_split PARAMS ((rtx, HOST_WIDE_INT *, HOST_WIDE_INT *));
static int const_costs_int PARAMS ((HOST_WIDE_INT, int));
static void substitute_ep_register PARAMS ((rtx, rtx, int, int, rtx *, rtx *));
static int ep_memory_offset PARAMS ((enum machine_mode, int));
static void v850_set_data_area PARAMS ((tree, v850_data_area));
const struct attribute_spec v850_attribute_table[];
static tree v850_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree v850_handle_data_area_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void v850_insert_attributes PARAMS ((tree, tree *));
static void v850_select_section PARAMS ((tree, int, unsigned HOST_WIDE_INT));
static void v850_encode_data_area PARAMS ((tree));
static void v850_encode_section_info PARAMS ((tree, int));
static const char *v850_strip_name_encoding PARAMS ((const char *));

/* Information about the various small memory areas.
   "value" holds the text of any -m{t,s,z}da=nnn switch (parsed later by
   override_options), "max" the parsed limit, and "physical_max" the
   hardware ceiling that "max" may not exceed.  */
struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
{
  /* name	value		max	physical max */
  { "tda",	(char *)0,	0,	256 },
  { "sda",	(char *)0,	0,	65536 },
  { "zda",	(char *)0,	0,	32768 },
};

/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Whether current function is an interrupt handler
   (cached alongside v850_interrupt_cache_p above).  */
static int v850_interrupt_p = FALSE;

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING v850_strip_name_encoding

/* The single instance of the target hooks vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
107
108 /* Sometimes certain combinations of command options do not make
109 sense on a particular target machine. You can define a macro
110 `OVERRIDE_OPTIONS' to take account of this. This macro, if
111 defined, is executed once just after all the command options have
112 been parsed.
113
114 Don't use this macro to turn on various extra optimizations for
115 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
116
void
override_options ()
{
  int i;
  /* Declared locally: this file does not assume <stdlib.h> has been
     pulled in on every host.  */
  extern int atoi PARAMS ((const char *));

  /* Parse -m{s,t,z}da=nnn switches.  The raw switch text was stashed in
     small_memory[i].value by the option-handling machinery; convert it
     to a byte limit and check it against the hardware ceiling.  */
  for (i = 0; i < (int)SMALL_MEMORY_max; i++)
    {
      if (small_memory[i].value)
	{
	  if (!ISDIGIT (*small_memory[i].value))
	    error ("%s=%s is not numeric",
		   small_memory[i].name,
		   small_memory[i].value);
	  else
	    {
	      small_memory[i].max = atoi (small_memory[i].value);
	      if (small_memory[i].max > small_memory[i].physical_max)
		error ("%s=%s is too large",
		       small_memory[i].name,
		       small_memory[i].value);
	    }
	}
    }

  /* Make sure that the US_BIT_SET mask has been correctly initialized:
     if the user gave no explicit -m{,no-}US-bit-set switch, record that
     fact and default the bit to clear.  */
  if ((target_flags & MASK_US_MASK_SET) == 0)
    {
      target_flags |= MASK_US_MASK_SET;
      target_flags &= ~MASK_US_BIT_SET;
    }
}
150
151
152 /* Output assembly code for the start of the file. */
153
void
asm_file_start (file)
     FILE *file;
{
  /* Emit a .file directive naming the primary input file.  */
  output_file_directive (file, main_input_filename);
}
160
161
162 /* Return an RTX to represent where a value with mode MODE will be returned
163 from a function. If the result is 0, the argument is pushed. */
164
165 rtx
function_arg(cum,mode,type,named)166 function_arg (cum, mode, type, named)
167 CUMULATIVE_ARGS *cum;
168 enum machine_mode mode;
169 tree type;
170 int named;
171 {
172 rtx result = 0;
173 int size, align;
174
175 if (TARGET_GHS && !named)
176 return NULL_RTX;
177
178 if (mode == BLKmode)
179 size = int_size_in_bytes (type);
180 else
181 size = GET_MODE_SIZE (mode);
182
183 if (size < 1)
184 return 0;
185
186 if (type)
187 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
188 else
189 align = size;
190
191 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
192
193 if (cum->nbytes > 4 * UNITS_PER_WORD)
194 return 0;
195
196 if (type == NULL_TREE
197 && cum->nbytes + size > 4 * UNITS_PER_WORD)
198 return 0;
199
200 switch (cum->nbytes / UNITS_PER_WORD)
201 {
202 case 0:
203 result = gen_rtx_REG (mode, 6);
204 break;
205 case 1:
206 result = gen_rtx_REG (mode, 7);
207 break;
208 case 2:
209 result = gen_rtx_REG (mode, 8);
210 break;
211 case 3:
212 result = gen_rtx_REG (mode, 9);
213 break;
214 default:
215 result = 0;
216 }
217
218 return result;
219 }
220
221
222 /* Return the number of words which must be put into registers
223 for values which are part in registers and part in memory. */
224
225 int
function_arg_partial_nregs(cum,mode,type,named)226 function_arg_partial_nregs (cum, mode, type, named)
227 CUMULATIVE_ARGS *cum;
228 enum machine_mode mode;
229 tree type;
230 int named;
231 {
232 int size, align;
233
234 if (TARGET_GHS && !named)
235 return 0;
236
237 if (mode == BLKmode)
238 size = int_size_in_bytes (type);
239 else
240 size = GET_MODE_SIZE (mode);
241
242 if (type)
243 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
244 else
245 align = size;
246
247 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
248
249 if (cum->nbytes > 4 * UNITS_PER_WORD)
250 return 0;
251
252 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
253 return 0;
254
255 if (type == NULL_TREE
256 && cum->nbytes + size > 4 * UNITS_PER_WORD)
257 return 0;
258
259 return (4 * UNITS_PER_WORD - cum->nbytes) / UNITS_PER_WORD;
260 }
261
262
263 /* Return the high and low words of a CONST_DOUBLE */
264
/* Return the high and low words of a CONST_DOUBLE X in *P_HIGH and
   *P_LOW.  Handles DFmode/SFmode floats and VOIDmode/DImode integer
   constants; anything else is a fatal error.  */

static void
const_double_split (x, p_high, p_low)
     rtx x;
     HOST_WIDE_INT *p_high;
     HOST_WIDE_INT *p_low;
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  /* A single float fits in one word; report it as the "high"
	     part and zero for the unused low word.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integer CONST_DOUBLEs already store their two halves.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
304
305
306 /* Return the cost of the rtx R with code CODE. */
307
308 static int
const_costs_int(value,zero_cost)309 const_costs_int (value, zero_cost)
310 HOST_WIDE_INT value;
311 int zero_cost;
312 {
313 if (CONST_OK_FOR_I (value))
314 return zero_cost;
315 else if (CONST_OK_FOR_J (value))
316 return 1;
317 else if (CONST_OK_FOR_K (value))
318 return 2;
319 else
320 return 4;
321 }
322
323 int
const_costs(r,c)324 const_costs (r, c)
325 rtx r;
326 enum rtx_code c;
327 {
328 HOST_WIDE_INT high, low;
329
330 switch (c)
331 {
332 case CONST_INT:
333 return const_costs_int (INTVAL (r), 0);
334
335 case CONST_DOUBLE:
336 const_double_split (r, &high, &low);
337 if (GET_MODE (r) == SFmode)
338 return const_costs_int (high, 1);
339 else
340 return const_costs_int (high, 1) + const_costs_int (low, 1);
341
342 case SYMBOL_REF:
343 case LABEL_REF:
344 case CONST:
345 return 2;
346
347 case HIGH:
348 return 1;
349
350 default:
351 return 4;
352 }
353 }
354
355
356 /* Print operand X using operand code CODE to assembly language output file
357 FILE. */
358
359 void
print_operand(file,x,code)360 print_operand (file, x, code)
361 FILE *file;
362 rtx x;
363 int code;
364 {
365 HOST_WIDE_INT high, low;
366
367 switch (code)
368 {
369 case 'c':
370 /* We use 'c' operands with symbols for .vtinherit */
371 if (GET_CODE (x) == SYMBOL_REF)
372 {
373 output_addr_const(file, x);
374 break;
375 }
376 /* fall through */
377 case 'b':
378 case 'B':
379 case 'C':
380 switch ((code == 'B' || code == 'C')
381 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
382 {
383 case NE:
384 if (code == 'c' || code == 'C')
385 fprintf (file, "nz");
386 else
387 fprintf (file, "ne");
388 break;
389 case EQ:
390 if (code == 'c' || code == 'C')
391 fprintf (file, "z");
392 else
393 fprintf (file, "e");
394 break;
395 case GE:
396 fprintf (file, "ge");
397 break;
398 case GT:
399 fprintf (file, "gt");
400 break;
401 case LE:
402 fprintf (file, "le");
403 break;
404 case LT:
405 fprintf (file, "lt");
406 break;
407 case GEU:
408 fprintf (file, "nl");
409 break;
410 case GTU:
411 fprintf (file, "h");
412 break;
413 case LEU:
414 fprintf (file, "nh");
415 break;
416 case LTU:
417 fprintf (file, "l");
418 break;
419 default:
420 abort ();
421 }
422 break;
423 case 'F': /* high word of CONST_DOUBLE */
424 if (GET_CODE (x) == CONST_INT)
425 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
426 else if (GET_CODE (x) == CONST_DOUBLE)
427 {
428 const_double_split (x, &high, &low);
429 fprintf (file, "%ld", (long) high);
430 }
431 else
432 abort ();
433 break;
434 case 'G': /* low word of CONST_DOUBLE */
435 if (GET_CODE (x) == CONST_INT)
436 fprintf (file, "%ld", (long) INTVAL (x));
437 else if (GET_CODE (x) == CONST_DOUBLE)
438 {
439 const_double_split (x, &high, &low);
440 fprintf (file, "%ld", (long) low);
441 }
442 else
443 abort ();
444 break;
445 case 'L':
446 fprintf (file, "%d\n", INTVAL (x) & 0xffff);
447 break;
448 case 'M':
449 fprintf (file, "%d", exact_log2 (INTVAL (x)));
450 break;
451 case 'O':
452 if (special_symbolref_operand (x, VOIDmode))
453 {
454 const char *name;
455
456 if (GET_CODE (x) == SYMBOL_REF)
457 name = XSTR (x, 0);
458 else if (GET_CODE (x) == CONST)
459 name = XSTR (XEXP (XEXP (x, 0), 0), 0);
460 else
461 abort ();
462
463 if (ZDA_NAME_P (name))
464 fprintf (file, "zdaoff");
465 else if (SDA_NAME_P (name))
466 fprintf (file, "sdaoff");
467 else if (TDA_NAME_P (name))
468 fprintf (file, "tdaoff");
469 else
470 abort ();
471 }
472 else
473 abort ();
474 break;
475 case 'P':
476 if (special_symbolref_operand (x, VOIDmode))
477 output_addr_const (file, x);
478 else
479 abort ();
480 break;
481 case 'Q':
482 if (special_symbolref_operand (x, VOIDmode))
483 {
484 const char *name;
485
486 if (GET_CODE (x) == SYMBOL_REF)
487 name = XSTR (x, 0);
488 else if (GET_CODE (x) == CONST)
489 name = XSTR (XEXP (XEXP (x, 0), 0), 0);
490 else
491 abort ();
492
493 if (ZDA_NAME_P (name))
494 fprintf (file, "r0");
495 else if (SDA_NAME_P (name))
496 fprintf (file, "gp");
497 else if (TDA_NAME_P (name))
498 fprintf (file, "ep");
499 else
500 abort ();
501 }
502 else
503 abort ();
504 break;
505 case 'R': /* 2nd word of a double. */
506 switch (GET_CODE (x))
507 {
508 case REG:
509 fprintf (file, reg_names[REGNO (x) + 1]);
510 break;
511 case MEM:
512 x = XEXP (adjust_address (x, SImode, 4), 0);
513 print_operand_address (file, x);
514 if (GET_CODE (x) == CONST_INT)
515 fprintf (file, "[r0]");
516 break;
517
518 default:
519 break;
520 }
521 break;
522 case 'S':
523 {
524 /* if it's a reference to a TDA variable, use sst/sld vs. st/ld */
525 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
526 fputs ("s", file);
527
528 break;
529 }
530 case 'T':
531 {
532 /* Like an 'S' operand above, but for unsigned loads only. */
533 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
534 fputs ("s", file);
535
536 break;
537 }
538 case 'W': /* print the instruction suffix */
539 switch (GET_MODE (x))
540 {
541 default:
542 abort ();
543
544 case QImode: fputs (".b", file); break;
545 case HImode: fputs (".h", file); break;
546 case SImode: fputs (".w", file); break;
547 case SFmode: fputs (".w", file); break;
548 }
549 break;
550 case '.': /* register r0 */
551 fputs (reg_names[0], file);
552 break;
553 case 'z': /* reg or zero */
554 if (x == const0_rtx)
555 fputs (reg_names[0], file);
556 else if (GET_CODE (x) == REG)
557 fputs (reg_names[REGNO (x)], file);
558 else
559 abort ();
560 break;
561 default:
562 switch (GET_CODE (x))
563 {
564 case MEM:
565 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
566 output_address (gen_rtx_PLUS (SImode, gen_rtx (REG, SImode, 0),
567 XEXP (x, 0)));
568 else
569 output_address (XEXP (x, 0));
570 break;
571
572 case REG:
573 fputs (reg_names[REGNO (x)], file);
574 break;
575 case SUBREG:
576 fputs (reg_names[subreg_regno (x)], file);
577 break;
578 case CONST_INT:
579 case SYMBOL_REF:
580 case CONST:
581 case LABEL_REF:
582 case CODE_LABEL:
583 print_operand_address (file, x);
584 break;
585 default:
586 abort ();
587 }
588 break;
589
590 }
591 }
592
593
594 /* Output assembly language output for the address ADDR to FILE. */
595
596 void
print_operand_address(file,addr)597 print_operand_address (file, addr)
598 FILE *file;
599 rtx addr;
600 {
601 switch (GET_CODE (addr))
602 {
603 case REG:
604 fprintf (file, "0[");
605 print_operand (file, addr, 0);
606 fprintf (file, "]");
607 break;
608 case LO_SUM:
609 if (GET_CODE (XEXP (addr, 0)) == REG)
610 {
611 /* reg,foo */
612 fprintf (file, "lo(");
613 print_operand (file, XEXP (addr, 1), 0);
614 fprintf (file, ")[");
615 print_operand (file, XEXP (addr, 0), 0);
616 fprintf (file, "]");
617 }
618 break;
619 case PLUS:
620 if (GET_CODE (XEXP (addr, 0)) == REG
621 || GET_CODE (XEXP (addr, 0)) == SUBREG)
622 {
623 /* reg,foo */
624 print_operand (file, XEXP (addr, 1), 0);
625 fprintf (file, "[");
626 print_operand (file, XEXP (addr, 0), 0);
627 fprintf (file, "]");
628 }
629 else
630 {
631 print_operand (file, XEXP (addr, 0), 0);
632 fprintf (file, "+");
633 print_operand (file, XEXP (addr, 1), 0);
634 }
635 break;
636 case SYMBOL_REF:
637 if (ENCODED_NAME_P (XSTR (addr, 0)))
638 {
639 const char *name = XSTR (addr, 0);
640 const char *off_name;
641 const char *reg_name;
642
643 if (ZDA_NAME_P (name))
644 {
645 off_name = "zdaoff";
646 reg_name = "r0";
647 }
648 else if (SDA_NAME_P (name))
649 {
650 off_name = "sdaoff";
651 reg_name = "gp";
652 }
653 else if (TDA_NAME_P (name))
654 {
655 off_name = "tdaoff";
656 reg_name = "ep";
657 }
658 else
659 abort ();
660
661 fprintf (file, "%s(", off_name);
662 output_addr_const (file, addr);
663 fprintf (file, ")[%s]", reg_name);
664 }
665 else
666 output_addr_const (file, addr);
667 break;
668 case CONST:
669 if (special_symbolref_operand (addr, VOIDmode))
670 {
671 const char *name = XSTR (XEXP (XEXP (addr, 0), 0), 0);
672 const char *off_name;
673 const char *reg_name;
674
675 if (ZDA_NAME_P (name))
676 {
677 off_name = "zdaoff";
678 reg_name = "r0";
679 }
680 else if (SDA_NAME_P (name))
681 {
682 off_name = "sdaoff";
683 reg_name = "gp";
684 }
685 else if (TDA_NAME_P (name))
686 {
687 off_name = "tdaoff";
688 reg_name = "ep";
689 }
690 else
691 abort ();
692
693 fprintf (file, "%s(", off_name);
694 output_addr_const (file, addr);
695 fprintf (file, ")[%s]", reg_name);
696 }
697 else
698 output_addr_const (file, addr);
699 break;
700 default:
701 output_addr_const (file, addr);
702 break;
703 }
704 }
705
706 /* When assemble_integer is used to emit the offsets for a switch
707 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
708 output_addr_const will normally barf at this, but it is OK to omit
709 the truncate and just emit the difference of the two labels. The
710 .hword directive will automatically handle the truncation for us.
711
712 Returns 1 if rtx was handled, 0 otherwise. */
713
714 int
v850_output_addr_const_extra(file,x)715 v850_output_addr_const_extra (file, x)
716 FILE * file;
717 rtx x;
718 {
719 if (GET_CODE (x) != TRUNCATE)
720 return 0;
721
722 x = XEXP (x, 0);
723
724 /* We must also handle the case where the switch table was passed a
725 constant value and so has been collapsed. In this case the first
726 label will have been deleted. In such a case it is OK to emit
727 nothing, since the table will not be used.
728 (cf gcc.c-torture/compile/990801-1.c). */
729 if (GET_CODE (x) == MINUS
730 && GET_CODE (XEXP (x, 0)) == LABEL_REF
731 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
732 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
733 return 1;
734
735 output_addr_const (file, x);
736 return 1;
737 }
738
739 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
740 point value. */
741
/* Return appropriate code to load up a 1, 2, or 4 byte integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the source.
   The else-if chains below are ordered cheapest form first; the ranges
   overlap, so the order is significant.  */

const char *
output_move_single (operands)
     rtx *operands;
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5 bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16 bit immediate.  */
	    return "movea lo(%1),%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E)
	      return "mov %1,%0";
	  else
	    /* Two-instruction sequence: high half then add in the low.  */
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant lives entirely in "high" (%F1).  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5 bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16 bit immediate.  */
	    return "movea lo(%F1),%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E)
	      return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 selects the short (sld) form when possible.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	/* Small-data symbol: one movea off the area's base register.  */
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  /* Expose the two halves as extra operands for the template.  */
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store of zero: use r0 (%.) as the source.  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  /* No template matched: this operand combination is a backend bug.  */
  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
840
841
842 /* Return appropriate code to load up an 8 byte integer or
843 floating point value */
844
/* Return appropriate code to load up an 8 byte integer or
   floating point value.  OPERANDS[0] is the destination,
   OPERANDS[1] the source.  */

const char *
output_move_double (operands)
     rtx *operands;
{
  enum machine_mode mode = GET_MODE (operands[0]);
  rtx dst = operands[0];
  rtx src = operands[1];

  if (register_operand (dst, mode)
      && register_operand (src, mode))
    {
      /* Order the two moves so an overlapping register pair is not
	 clobbered before it is read.  */
      if (REGNO (src) + 1 == REGNO (dst))
	return "mov %R1,%R0\n\tmov %1,%0";
      else
	return "mov %1,%0\n\tmov %R1,%R0";
    }

  /* Storing 0 */
  if (GET_CODE (dst) == MEM
      && ((GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	  || (GET_CODE (src) == CONST_DOUBLE && CONST_DOUBLE_OK_FOR_G (src))))
    return "st.w %.,%0\n\tst.w %.,%R0";

  if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
    {
      HOST_WIDE_INT high_low[2];
      int i;
      rtx xop[10];

      if (GET_CODE (src) == CONST_DOUBLE)
	const_double_split (src, &high_low[1], &high_low[0]);
      else
	{
	  /* Sign-extend the CONST_INT into a high/low pair.  */
	  high_low[0] = INTVAL (src);
	  high_low[1] = (INTVAL (src) >= 0) ? 0 : -1;
	}

      /* Emit one single-word move per half.  */
      for (i = 0; i < 2; i++)
	{
	  xop[0] = gen_rtx_REG (SImode, REGNO (dst)+i);
	  xop[1] = GEN_INT (high_low[i]);
	  output_asm_insn (output_move_single (xop), xop);
	}

      return "";
    }

  if (GET_CODE (src) == MEM)
    {
      /* NOTE(review): this branch assumes dst is a REG when src is a
	 MEM (mem-to-mem moves never reach here) -- confirm against the
	 movdi/movdf patterns.  */
      int ptrreg = -1;
      int dreg = REGNO (dst);
      rtx inside = XEXP (src, 0);

      if (GET_CODE (inside) == REG)
	ptrreg = REGNO (inside);
      else if (GET_CODE (inside) == SUBREG)
	ptrreg = subreg_regno (inside);
      else if (GET_CODE (inside) == PLUS)
	ptrreg = REGNO (XEXP (inside, 0));
      else if (GET_CODE (inside) == LO_SUM)
	ptrreg = REGNO (XEXP (inside, 0));

      /* If loading through the base register, load the second word
	 first so the address is not clobbered.  */
      if (dreg == ptrreg)
	return "ld.w %R1,%R0\n\tld.w %1,%0";
    }

  if (GET_CODE (src) == MEM)
    return "ld.w %1,%0\n\tld.w %R1,%R0";

  if (GET_CODE (dst) == MEM)
    return "st.w %1,%0\n\tst.w %R1,%R0";

  return "mov %1,%0\n\tmov %R1,%R0";
}
919
920
921 /* Return maximum offset supported for a short EP memory reference of mode
922 MODE and signedness UNSIGNEDP. */
923
924 static int
ep_memory_offset(mode,unsignedp)925 ep_memory_offset (mode, unsignedp)
926 enum machine_mode mode;
927 int ATTRIBUTE_UNUSED unsignedp;
928 {
929 int max_offset = 0;
930
931 switch (mode)
932 {
933 case QImode:
934 if (TARGET_SMALL_SLD)
935 max_offset = (1 << 4);
936 else if (TARGET_V850E
937 && ( ( unsignedp && ! TARGET_US_BIT_SET)
938 || (! unsignedp && TARGET_US_BIT_SET)))
939 max_offset = (1 << 4);
940 else
941 max_offset = (1 << 7);
942 break;
943
944 case HImode:
945 if (TARGET_SMALL_SLD)
946 max_offset = (1 << 5);
947 else if (TARGET_V850E
948 && ( ( unsignedp && ! TARGET_US_BIT_SET)
949 || (! unsignedp && TARGET_US_BIT_SET)))
950 max_offset = (1 << 5);
951 else
952 max_offset = (1 << 8);
953 break;
954
955 case SImode:
956 case SFmode:
957 max_offset = (1 << 8);
958 break;
959
960 default:
961 break;
962 }
963
964 return max_offset;
965 }
966
967 /* Return true if OP is a valid short EP memory reference */
968
969 int
ep_memory_operand(op,mode,unsigned_load)970 ep_memory_operand (op, mode, unsigned_load)
971 rtx op;
972 enum machine_mode mode;
973 int unsigned_load;
974 {
975 rtx addr, op0, op1;
976 int max_offset;
977 int mask;
978
979 if (GET_CODE (op) != MEM)
980 return FALSE;
981
982 max_offset = ep_memory_offset (mode, unsigned_load);
983
984 mask = GET_MODE_SIZE (mode) - 1;
985
986 addr = XEXP (op, 0);
987 if (GET_CODE (addr) == CONST)
988 addr = XEXP (addr, 0);
989
990 switch (GET_CODE (addr))
991 {
992 default:
993 break;
994
995 case SYMBOL_REF:
996 return TDA_NAME_P (XSTR (addr, 0));
997
998 case REG:
999 return REGNO (addr) == EP_REGNUM;
1000
1001 case PLUS:
1002 op0 = XEXP (addr, 0);
1003 op1 = XEXP (addr, 1);
1004 if (GET_CODE (op1) == CONST_INT
1005 && INTVAL (op1) < max_offset
1006 && INTVAL (op1) >= 0
1007 && (INTVAL (op1) & mask) == 0)
1008 {
1009 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1010 return TRUE;
1011
1012 if (GET_CODE (op0) == SYMBOL_REF && TDA_NAME_P (XSTR (op0, 0)))
1013 return TRUE;
1014 }
1015 break;
1016 }
1017
1018 return FALSE;
1019 }
1020
1021 /* Return true if OP is either a register or 0 */
1022
1023 int
reg_or_0_operand(op,mode)1024 reg_or_0_operand (op, mode)
1025 rtx op;
1026 enum machine_mode mode;
1027 {
1028 if (GET_CODE (op) == CONST_INT)
1029 return INTVAL (op) == 0;
1030
1031 else if (GET_CODE (op) == CONST_DOUBLE)
1032 return CONST_DOUBLE_OK_FOR_G (op);
1033
1034 else
1035 return register_operand (op, mode);
1036 }
1037
1038 /* Return true if OP is either a register or a signed five bit integer */
1039
1040 int
reg_or_int5_operand(op,mode)1041 reg_or_int5_operand (op, mode)
1042 rtx op;
1043 enum machine_mode mode;
1044 {
1045 if (GET_CODE (op) == CONST_INT)
1046 return CONST_OK_FOR_J (INTVAL (op));
1047
1048 else
1049 return register_operand (op, mode);
1050 }
1051
1052 /* Return true if OP is either a register or a signed nine bit integer. */
1053
1054 int
reg_or_int9_operand(op,mode)1055 reg_or_int9_operand (op, mode)
1056 rtx op;
1057 enum machine_mode mode;
1058 {
1059 if (GET_CODE (op) == CONST_INT)
1060 return CONST_OK_FOR_O (INTVAL (op));
1061
1062 return register_operand (op, mode);
1063 }
1064
1065 /* Return true if OP is either a register or a const integer. */
1066
1067 int
reg_or_const_operand(op,mode)1068 reg_or_const_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode;
1071 {
1072 if (GET_CODE (op) == CONST_INT)
1073 return TRUE;
1074
1075 return register_operand (op, mode);
1076 }
1077
1078 /* Return true if OP is a valid call operand. */
1079
1080 int
call_address_operand(op,mode)1081 call_address_operand (op, mode)
1082 rtx op;
1083 enum machine_mode ATTRIBUTE_UNUSED mode;
1084 {
1085 /* Only registers are valid call operands if TARGET_LONG_CALLS. */
1086 if (TARGET_LONG_CALLS)
1087 return GET_CODE (op) == REG;
1088 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == REG);
1089 }
1090
1091 int
special_symbolref_operand(op,mode)1092 special_symbolref_operand (op, mode)
1093 rtx op;
1094 enum machine_mode ATTRIBUTE_UNUSED mode;
1095 {
1096 if (GET_CODE (op) == SYMBOL_REF)
1097 return ENCODED_NAME_P (XSTR (op, 0));
1098
1099 else if (GET_CODE (op) == CONST)
1100 return (GET_CODE (XEXP (op, 0)) == PLUS
1101 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
1102 && ENCODED_NAME_P (XSTR (XEXP (XEXP (op, 0), 0), 0))
1103 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
1104 && CONST_OK_FOR_K (INTVAL (XEXP (XEXP (op, 0), 1))));
1105
1106 return FALSE;
1107 }
1108
1109 int
movsi_source_operand(op,mode)1110 movsi_source_operand (op, mode)
1111 rtx op;
1112 enum machine_mode mode;
1113 {
1114 /* Some constants, as well as symbolic operands
1115 must be done with HIGH & LO_SUM patterns. */
1116 if (CONSTANT_P (op)
1117 && GET_CODE (op) != HIGH
1118 && GET_CODE (op) != CONSTANT_P_RTX
1119 && !(GET_CODE (op) == CONST_INT
1120 && (CONST_OK_FOR_J (INTVAL (op))
1121 || CONST_OK_FOR_K (INTVAL (op))
1122 || CONST_OK_FOR_L (INTVAL (op)))))
1123 return special_symbolref_operand (op, mode);
1124 else
1125 return general_operand (op, mode);
1126 }
1127
1128 int
power_of_two_operand(op,mode)1129 power_of_two_operand (op, mode)
1130 rtx op;
1131 enum machine_mode ATTRIBUTE_UNUSED mode;
1132 {
1133 if (GET_CODE (op) != CONST_INT)
1134 return 0;
1135
1136 if (exact_log2 (INTVAL (op)) == -1)
1137 return 0;
1138 return 1;
1139 }
1140
1141 int
not_power_of_two_operand(op,mode)1142 not_power_of_two_operand (op, mode)
1143 rtx op;
1144 enum machine_mode mode;
1145 {
1146 unsigned int mask;
1147
1148 if (mode == QImode)
1149 mask = 0xff;
1150 else if (mode == HImode)
1151 mask = 0xffff;
1152 else if (mode == SImode)
1153 mask = 0xffffffff;
1154 else
1155 return 0;
1156
1157 if (GET_CODE (op) != CONST_INT)
1158 return 0;
1159
1160 if (exact_log2 (~INTVAL (op) & mask) == -1)
1161 return 0;
1162 return 1;
1163 }
1164
1165
1166 /* Substitute memory references involving a pointer, to use the ep pointer,
1167 taking care to save and preserve the ep. */
1168
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  Rewrites the references to
   REGNO between FIRST_INSN and LAST_INSN (USES of them) to go through ep
   instead, then brackets the region with insns that save ep in r1, copy
   REGNO into ep, and restore ep afterwards.  *P_R1/*P_EP cache the r1
   and ep REG rtxes across calls.  */

static void
substitute_ep_register (first_insn, last_insn, uses, regno, p_r1, p_ep)
     rtx first_insn;
     rtx last_insn;
     int uses;
     int regno;
     rtx *p_r1;
     rtx *p_ep;
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  /* First use: build the cached r1/ep rtxes and mark r1 live (it holds
     the saved ep).  */
  if (!*p_r1)
    {
      regs_ever_live[1] = 1;
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  /* Walk the half-open range [first_insn, one past last_insn).  */
  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the (single) MEM operand of the set, if any.
		 Mem-to-mem sets are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* [regno] becomes [ep]; [regno + offset] becomes
		     [ep + offset] when the offset fits the short form
		     for this mode.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Load the pointer into ep for the region, and restore ep afterwards.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1272
1273
/* In rare cases, correct code generation requires extra machine
   dependent processing between the second jump optimization pass and
   delayed branch scheduling.  On those machines, define this macro
   as a C statement to act on the code starting at INSN.

   On the 850, we use it to implement the -mep mode to copy heavily used
   pointers to ep to use the implicit addressing.  */

void
v850_reorg (start_insn)
     rtx start_insn;
{
  /* Per hard register: how many short-form memory references through
     that register we have seen in the current basic block, and the
     first/last insns of the run, so substitute_ep_register can rewrite
     exactly that region.  */
  struct
  {
    int uses;
    rtx first_insn;
    rtx last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;		/* Nonzero while ep already addresses memory here.  */
  rtx r1 = NULL_RTX;		/* Cached r1/ep rtxes shared with the substituter.  */
  rtx ep = NULL_RTX;
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  for (insn = start_insn; insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block: anything that is not a NOTE or a plain
	   INSN (labels, jumps, calls, ...) terminates the tracking.
	   If the best register was used more than 3 times, rewriting
	   its references to go through ep pays off.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  /* Reset all tracking for the next basic block.  */
	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL_RTX;
	      regs[i].last_insn = NULL_RTX;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src);

	      /* Find the single memory operand of the set, if any.
		 A mem-to-mem move is left alone (mem stays null).  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* A reference that already qualifies for the short ep
		 encoding means ep currently holds a useful pointer.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      /* Otherwise, count word-or-smaller references of the form
		 (reg) or (reg + small offset) as candidates.  */
	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register */
	      if (GET_CODE (dest) == REG)
		{
		  enum machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL_RTX;
			      regs[i].last_insn = NULL_RTX;
			    }
			}
		    }

		  /* The clobbered registers can no longer be candidates.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL_RTX;
		      regs[i].last_insn = NULL_RTX;
		    }
		}
	    }
	}
    }
}
1491
1492
/* # of registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_NUM 4

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of registers saved in register parameter area.  */
#define INTERRUPT_REGPARM_NUM 4

/* # of words saved for other registers.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)

/* # of bytes for the "save everything" case (see save_all_interrupt).  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1506
/* Return the number of bytes of stack needed to save the registers of
   the current function.  If P_REG_SAVED is non-null, store through it a
   bit mask of the registers the prologue must explicitly save (bit N
   set => register N).  Registers that interrupt entry code handles
   specially contribute to the returned size but not to the mask.  */
int
compute_register_save_size (p_reg_saved)
     long *p_reg_saved;
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* Nonzero if the link pointer is live, i.e. this function makes calls.  */
  int call_p = regs_ever_live [LINK_POINTER_REGNUM];
  long reg_saved = 0;

  /* Count the return pointer if we need to save it.  */
  if (current_function_profile && !call_p)
    regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (regs_ever_live[i] || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (regs_ever_live[i] && ((! call_used_regs[i])
				  || i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* Helper saves of r2 continue from r20.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (regs_ever_live [LINK_POINTER_REGNUM])
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers.  */
	  for (; i <= 31; i++)
	    if (regs_ever_live[i] && ((! call_used_regs[i])
				      || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1611
1612 int
compute_frame_size(size,p_reg_saved)1613 compute_frame_size (size, p_reg_saved)
1614 int size;
1615 long *p_reg_saved;
1616 {
1617 return (size
1618 + compute_register_save_size (p_reg_saved)
1619 + current_function_outgoing_args_size);
1620 }
1621
1622
1623 void
expand_prologue()1624 expand_prologue ()
1625 {
1626 unsigned int i;
1627 int offset;
1628 unsigned int size = get_frame_size ();
1629 unsigned int actual_fsize;
1630 unsigned int init_stack_alloc = 0;
1631 rtx save_regs[32];
1632 rtx save_all;
1633 unsigned int num_save;
1634 unsigned int default_stack;
1635 int code;
1636 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1637 long reg_saved = 0;
1638
1639 actual_fsize = compute_frame_size (size, ®_saved);
1640
1641 /* Save/setup global registers for interrupt functions right now. */
1642 if (interrupt_handler)
1643 {
1644 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1645 emit_insn (gen_callt_save_interrupt ());
1646 else
1647 emit_insn (gen_save_interrupt ());
1648
1649 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1650
1651 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1652 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1653 }
1654
1655 /* Save arg registers to the stack if necessary. */
1656 else if (current_function_args_info.anonymous_args)
1657 {
1658 if (TARGET_PROLOG_FUNCTION)
1659 {
1660 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1661 emit_insn (gen_save_r6_r9_v850e ());
1662 else
1663 emit_insn (gen_save_r6_r9 ());
1664 }
1665 else
1666 {
1667 offset = 0;
1668 for (i = 6; i < 10; i++)
1669 {
1670 emit_move_insn (gen_rtx_MEM (SImode,
1671 plus_constant (stack_pointer_rtx,
1672 offset)),
1673 gen_rtx_REG (SImode, i));
1674 offset += 4;
1675 }
1676 }
1677 }
1678
1679 /* Identify all of the saved registers. */
1680 num_save = 0;
1681 default_stack = 0;
1682 for (i = 1; i < 31; i++)
1683 {
1684 if (((1L << i) & reg_saved) != 0)
1685 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1686 }
1687
1688 /* If the return pointer is saved, the helper functions also allocate
1689 16 bytes of stack for arguments to be saved in. */
1690 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1691 {
1692 save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1693 default_stack = 16;
1694 }
1695
1696 /* See if we have an insn that allocates stack space and saves the particular
1697 registers we want to. */
1698 save_all = NULL_RTX;
1699 if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
1700 {
1701 int alloc_stack = (4 * num_save) + default_stack;
1702 int unalloc_stack = actual_fsize - alloc_stack;
1703 int save_func_len = 4;
1704 int save_normal_len;
1705
1706 if (unalloc_stack)
1707 save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1708
1709 /* see if we would have used ep to save the stack */
1710 if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
1711 save_normal_len = (3 * 2) + (2 * num_save);
1712 else
1713 save_normal_len = 4 * num_save;
1714
1715 save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;
1716
1717 /* Don't bother checking if we don't actually save any space.
1718 This happens for instance if one register is saved and additional
1719 stack space is allocated. */
1720 if (save_func_len < save_normal_len)
1721 {
1722 save_all = gen_rtx_PARALLEL
1723 (VOIDmode,
1724 rtvec_alloc (num_save + (TARGET_V850 ? 2 : 1)));
1725
1726 XVECEXP (save_all, 0, 0)
1727 = gen_rtx_SET (VOIDmode,
1728 stack_pointer_rtx,
1729 plus_constant (stack_pointer_rtx, -alloc_stack));
1730
1731 if (TARGET_V850)
1732 {
1733 XVECEXP (save_all, 0, num_save+1)
1734 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1735 }
1736
1737 offset = - default_stack;
1738 for (i = 0; i < num_save; i++)
1739 {
1740 XVECEXP (save_all, 0, i+1)
1741 = gen_rtx_SET (VOIDmode,
1742 gen_rtx_MEM (Pmode,
1743 plus_constant (stack_pointer_rtx,
1744 offset)),
1745 save_regs[i]);
1746 offset -= 4;
1747 }
1748
1749 code = recog (save_all, NULL_RTX, NULL);
1750 if (code >= 0)
1751 {
1752 rtx insn = emit_insn (save_all);
1753 INSN_CODE (insn) = code;
1754 actual_fsize -= alloc_stack;
1755
1756 if (TARGET_DEBUG)
1757 fprintf (stderr, "\
1758 Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
1759 save_normal_len - save_func_len,
1760 save_normal_len, save_func_len,
1761 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1762 }
1763 else
1764 save_all = NULL_RTX;
1765 }
1766 }
1767
1768 /* If no prolog save function is available, store the registers the old
1769 fashioned way (one by one). */
1770 if (!save_all)
1771 {
1772 /* Special case interrupt functions that save all registers for a call. */
1773 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1774 {
1775 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1776 emit_insn (gen_callt_save_all_interrupt ());
1777 else
1778 emit_insn (gen_save_all_interrupt ());
1779 }
1780 else
1781 {
1782 /* If the stack is too big, allocate it in chunks so we can do the
1783 register saves. We use the register save size so we use the ep
1784 register. */
1785 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1786 init_stack_alloc = compute_register_save_size (NULL);
1787 else
1788 init_stack_alloc = actual_fsize;
1789
1790 /* Save registers at the beginning of the stack frame */
1791 offset = init_stack_alloc - 4;
1792
1793 if (init_stack_alloc)
1794 emit_insn (gen_addsi3 (stack_pointer_rtx,
1795 stack_pointer_rtx,
1796 GEN_INT (-init_stack_alloc)));
1797
1798 /* Save the return pointer first. */
1799 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1800 {
1801 emit_move_insn (gen_rtx_MEM (SImode,
1802 plus_constant (stack_pointer_rtx,
1803 offset)),
1804 save_regs[--num_save]);
1805 offset -= 4;
1806 }
1807
1808 for (i = 0; i < num_save; i++)
1809 {
1810 emit_move_insn (gen_rtx_MEM (SImode,
1811 plus_constant (stack_pointer_rtx,
1812 offset)),
1813 save_regs[i]);
1814 offset -= 4;
1815 }
1816 }
1817 }
1818
1819 /* Allocate the rest of the stack that was not allocated above (either it is
1820 > 32K or we just called a function to save the registers and needed more
1821 stack. */
1822 if (actual_fsize > init_stack_alloc)
1823 {
1824 int diff = actual_fsize - init_stack_alloc;
1825 if (CONST_OK_FOR_K (diff))
1826 emit_insn (gen_addsi3 (stack_pointer_rtx,
1827 stack_pointer_rtx,
1828 GEN_INT (-diff)));
1829 else
1830 {
1831 rtx reg = gen_rtx_REG (Pmode, 12);
1832 emit_move_insn (reg, GEN_INT (-diff));
1833 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1834 }
1835 }
1836
1837 /* If we need a frame pointer, set it up now. */
1838 if (frame_pointer_needed)
1839 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1840 }
1841
1842
1843 void
expand_epilogue()1844 expand_epilogue ()
1845 {
1846 unsigned int i;
1847 int offset;
1848 unsigned int size = get_frame_size ();
1849 long reg_saved = 0;
1850 unsigned int actual_fsize = compute_frame_size (size, ®_saved);
1851 unsigned int init_stack_free = 0;
1852 rtx restore_regs[32];
1853 rtx restore_all;
1854 unsigned int num_restore;
1855 unsigned int default_stack;
1856 int code;
1857 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1858
1859 /* Eliminate the initial stack stored by interrupt functions. */
1860 if (interrupt_handler)
1861 {
1862 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1863 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1864 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1865 }
1866
1867 /* Cut off any dynamic stack created. */
1868 if (frame_pointer_needed)
1869 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1870
1871 /* Identify all of the saved registers. */
1872 num_restore = 0;
1873 default_stack = 0;
1874 for (i = 1; i < 31; i++)
1875 {
1876 if (((1L << i) & reg_saved) != 0)
1877 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1878 }
1879
1880 /* If the return pointer is saved, the helper functions also allocate
1881 16 bytes of stack for arguments to be saved in. */
1882 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1883 {
1884 restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1885 default_stack = 16;
1886 }
1887
1888 /* See if we have an insn that restores the particular registers we
1889 want to. */
1890 restore_all = NULL_RTX;
1891
1892 if (TARGET_PROLOG_FUNCTION
1893 && num_restore > 0
1894 && actual_fsize >= default_stack
1895 && !interrupt_handler)
1896 {
1897 int alloc_stack = (4 * num_restore) + default_stack;
1898 int unalloc_stack = actual_fsize - alloc_stack;
1899 int restore_func_len = 4;
1900 int restore_normal_len;
1901
1902 if (unalloc_stack)
1903 restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1904
1905 /* See if we would have used ep to restore the registers. */
1906 if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
1907 restore_normal_len = (3 * 2) + (2 * num_restore);
1908 else
1909 restore_normal_len = 4 * num_restore;
1910
1911 restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;
1912
1913 /* Don't bother checking if we don't actually save any space. */
1914 if (restore_func_len < restore_normal_len)
1915 {
1916 restore_all = gen_rtx_PARALLEL (VOIDmode,
1917 rtvec_alloc (num_restore + 2));
1918 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1919 XVECEXP (restore_all, 0, 1)
1920 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1921 gen_rtx_PLUS (Pmode,
1922 stack_pointer_rtx,
1923 GEN_INT (alloc_stack)));
1924
1925 offset = alloc_stack - 4;
1926 for (i = 0; i < num_restore; i++)
1927 {
1928 XVECEXP (restore_all, 0, i+2)
1929 = gen_rtx_SET (VOIDmode,
1930 restore_regs[i],
1931 gen_rtx_MEM (Pmode,
1932 plus_constant (stack_pointer_rtx,
1933 offset)));
1934 offset -= 4;
1935 }
1936
1937 code = recog (restore_all, NULL_RTX, NULL);
1938
1939 if (code >= 0)
1940 {
1941 rtx insn;
1942
1943 actual_fsize -= alloc_stack;
1944 if (actual_fsize)
1945 {
1946 if (CONST_OK_FOR_K (actual_fsize))
1947 emit_insn (gen_addsi3 (stack_pointer_rtx,
1948 stack_pointer_rtx,
1949 GEN_INT (actual_fsize)));
1950 else
1951 {
1952 rtx reg = gen_rtx_REG (Pmode, 12);
1953 emit_move_insn (reg, GEN_INT (actual_fsize));
1954 emit_insn (gen_addsi3 (stack_pointer_rtx,
1955 stack_pointer_rtx,
1956 reg));
1957 }
1958 }
1959
1960 insn = emit_jump_insn (restore_all);
1961 INSN_CODE (insn) = code;
1962
1963 if (TARGET_DEBUG)
1964 fprintf (stderr, "\
1965 Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
1966 restore_normal_len - restore_func_len,
1967 restore_normal_len, restore_func_len,
1968 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1969 }
1970 else
1971 restore_all = NULL_RTX;
1972 }
1973 }
1974
1975 /* If no epilog save function is available, restore the registers the
1976 old fashioned way (one by one). */
1977 if (!restore_all)
1978 {
1979 /* If the stack is large, we need to cut it down in 2 pieces. */
1980 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1981 init_stack_free = 4 * num_restore;
1982 else
1983 init_stack_free = actual_fsize;
1984
1985 /* Deallocate the rest of the stack if it is > 32K. */
1986 if (actual_fsize > init_stack_free)
1987 {
1988 int diff;
1989
1990 diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);
1991
1992 if (CONST_OK_FOR_K (diff))
1993 emit_insn (gen_addsi3 (stack_pointer_rtx,
1994 stack_pointer_rtx,
1995 GEN_INT (diff)));
1996 else
1997 {
1998 rtx reg = gen_rtx_REG (Pmode, 12);
1999 emit_move_insn (reg, GEN_INT (diff));
2000 emit_insn (gen_addsi3 (stack_pointer_rtx,
2001 stack_pointer_rtx,
2002 reg));
2003 }
2004 }
2005
2006 /* Special case interrupt functions that save all registers
2007 for a call. */
2008 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
2009 {
2010 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
2011 emit_insn (gen_callt_restore_all_interrupt ());
2012 else
2013 emit_insn (gen_restore_all_interrupt ());
2014 }
2015 else
2016 {
2017 /* Restore registers from the beginning of the stack frame. */
2018 offset = init_stack_free - 4;
2019
2020 /* Restore the return pointer first. */
2021 if (num_restore > 0
2022 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
2023 {
2024 emit_move_insn (restore_regs[--num_restore],
2025 gen_rtx_MEM (SImode,
2026 plus_constant (stack_pointer_rtx,
2027 offset)));
2028 offset -= 4;
2029 }
2030
2031 for (i = 0; i < num_restore; i++)
2032 {
2033 emit_move_insn (restore_regs[i],
2034 gen_rtx_MEM (SImode,
2035 plus_constant (stack_pointer_rtx,
2036 offset)));
2037
2038 emit_insn (gen_rtx_USE (VOIDmode, restore_regs[i]));
2039 offset -= 4;
2040 }
2041
2042 /* Cut back the remainder of the stack. */
2043 if (init_stack_free)
2044 emit_insn (gen_addsi3 (stack_pointer_rtx,
2045 stack_pointer_rtx,
2046 GEN_INT (init_stack_free)));
2047 }
2048
2049 /* And return or use reti for interrupt handlers. */
2050 if (interrupt_handler)
2051 {
2052 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
2053 emit_insn (gen_callt_return_interrupt ());
2054 else
2055 emit_jump_insn (gen_return_interrupt ());
2056 }
2057 else if (actual_fsize)
2058 emit_jump_insn (gen_return_internal ());
2059 else
2060 emit_jump_insn (gen_return ());
2061 }
2062
2063 v850_interrupt_cache_p = FALSE;
2064 v850_interrupt_p = FALSE;
2065 }
2066
2067
/* Update the condition code from the insn.  Implements NOTICE_UPDATE_CC:
   record in the global cc_status what INSN (whose recognized pattern is
   BODY) leaves in the condition-code flags, so a following conditional
   branch can reuse them instead of emitting a compare.  */

void
notice_update_cc (body, insn)
     rtx body;
     rtx insn;
{
  /* Dispatch on the machine-description "cc" attribute of INSN.  */
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.
	 If the flags were tracking that operand, forget them.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2116
2117 /* Retrieve the data area that has been chosen for the given decl. */
2118
2119 v850_data_area
v850_get_data_area(decl)2120 v850_get_data_area (decl)
2121 tree decl;
2122 {
2123 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2124 return DATA_AREA_SDA;
2125
2126 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2127 return DATA_AREA_TDA;
2128
2129 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2130 return DATA_AREA_ZDA;
2131
2132 return DATA_AREA_NORMAL;
2133 }
2134
2135 /* Store the indicated data area in the decl's attributes. */
2136
2137 static void
v850_set_data_area(decl,data_area)2138 v850_set_data_area (decl, data_area)
2139 tree decl;
2140 v850_data_area data_area;
2141 {
2142 tree name;
2143
2144 switch (data_area)
2145 {
2146 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2147 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2148 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2149 default:
2150 return;
2151 }
2152
2153 DECL_ATTRIBUTES (decl) = tree_cons
2154 (name, NULL, DECL_ATTRIBUTES (decl));
2155 }
2156
/* Table of V850-specific attributes; all take no arguments (min_len and
   max_len are 0) and must appear on a declaration (decl_req is true).  */
const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { NULL, 0, 0, false, false, false, NULL }	/* End-of-table sentinel.  */
};
2167
2168 /* Handle an "interrupt" attribute; arguments as in
2169 struct attribute_spec.handler. */
2170 static tree
v850_handle_interrupt_attribute(node,name,args,flags,no_add_attrs)2171 v850_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
2172 tree *node;
2173 tree name;
2174 tree args ATTRIBUTE_UNUSED;
2175 int flags ATTRIBUTE_UNUSED;
2176 bool *no_add_attrs;
2177 {
2178 if (TREE_CODE (*node) != FUNCTION_DECL)
2179 {
2180 warning ("`%s' attribute only applies to functions",
2181 IDENTIFIER_POINTER (name));
2182 *no_add_attrs = true;
2183 }
2184
2185 return NULL_TREE;
2186 }
2187
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  Rejects the attribute on local
   variables and on declarations already placed in a different area.  */
static tree
v850_handle_data_area_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    abort ();			/* Table and handler are out of sync.  */

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* Only file-scope variables can live in a small data area.  */
      if (current_function_decl != NULL_TREE)
	{
	  error_with_decl (decl, "\
a data area attribute cannot be specified for local variables");
	  *no_add_attrs = true;
	}

      /* Drop through.  */

    case FUNCTION_DECL:
      /* Refuse to move a decl that was already assigned elsewhere.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error_with_decl (decl, "\
data area of '%s' conflicts with previous declaration");
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2240
2241
2242 /* Return nonzero if FUNC is an interrupt function as specified
2243 by the "interrupt" attribute. */
2244
2245 int
v850_interrupt_function_p(func)2246 v850_interrupt_function_p (func)
2247 tree func;
2248 {
2249 tree a;
2250 int ret = 0;
2251
2252 if (v850_interrupt_cache_p)
2253 return v850_interrupt_p;
2254
2255 if (TREE_CODE (func) != FUNCTION_DECL)
2256 return 0;
2257
2258 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2259 if (a != NULL_TREE)
2260 ret = 1;
2261
2262 else
2263 {
2264 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2265 ret = a != NULL_TREE;
2266 }
2267
2268 /* Its not safe to trust global variables until after function inlining has
2269 been done. */
2270 if (reload_completed | reload_in_progress)
2271 v850_interrupt_p = ret;
2272
2273 return ret;
2274 }
2275
2276
/* Assign DECL to a small-data area (from its attributes, its explicit
   section name, or the -m{zda,sda,tda}=n size limits) and encode the
   choice by prefixing the area's flag character onto the symbol name
   in DECL's rtl.  */
static void
v850_encode_data_area (decl)
     tree decl;
{
  const char *str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int len = strlen (str);
  char * newstr;

  /* Map explict sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    ;			/* Unknown or zero size: leave in normal data.  */

	  else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still in the normal area: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  /* Build flag-char + original name (+ NUL) in a temporary buffer.  */
  newstr = alloca (len + 2);

  strcpy (newstr + 1, str);

  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: *newstr = ZDA_NAME_FLAG_CHAR; break;
    case DATA_AREA_TDA: *newstr = TDA_NAME_FLAG_CHAR; break;
    case DATA_AREA_SDA: *newstr = SDA_NAME_FLAG_CHAR; break;
    default: abort ();
    }

  /* Replace the symbol name with a GC-allocated copy of the new one.  */
  XSTR (XEXP (DECL_RTL (decl), 0), 0) = ggc_alloc_string (newstr, len + 2);
}
2337
2338 static void
v850_encode_section_info(decl,first)2339 v850_encode_section_info (decl, first)
2340 tree decl;
2341 int first;
2342 {
2343 if (first && TREE_CODE (decl) == VAR_DECL
2344 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2345 v850_encode_data_area (decl);
2346 }
2347
/* Implementation of TARGET_STRIP_NAME_ENCODING: skip the one-character
   data-area flag or user-label '*' prefix, if present.  */
static const char *
v850_strip_name_encoding (str)
     const char *str;
{
  if (ENCODED_NAME_P (str) || str[0] == '*')
    return str + 1;

  return str;
}
2354
2355 /* Return true if the given RTX is a register which can be restored
2356 by a function epilogue. */
2357 int
register_is_ok_for_epilogue(op,mode)2358 register_is_ok_for_epilogue (op, mode)
2359 rtx op;
2360 enum machine_mode ATTRIBUTE_UNUSED mode;
2361 {
2362 /* The save/restore routines can only cope with registers 20 - 31. */
2363 return ((GET_CODE (op) == REG)
2364 && (((REGNO (op) >= 20) && REGNO (op) <= 31)));
2365 }
2366
/* Return nonzero if the given RTX (a PARALLEL) is suitable for
   collapsing into a jump to an out-of-line function epilogue.  MODE is
   unused by the check.  */
int
pattern_is_ok_for_epilogue (op, mode)
     rtx op;
     enum machine_mode ATTRIBUTE_UNUSED mode;
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the function epilogue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
	  (mem:SI (plus:SI (reg:SI 3) (match_operand:SI n "immediate_operand" "i"))))
     */

  /* Elements 0-2 were validated by the md pattern; check the rest.  */
  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Destination must be a restorable SImode register, source an
	 SImode memory reference.  */
      if (GET_CODE (dest) != REG
	  || GET_MODE (dest) != SImode
	  || ! register_is_ok_for_epilogue (dest, SImode)
	  || GET_CODE (src) != MEM
	  || GET_MODE (src) != SImode)
	return 0;

      /* The address must be stack pointer plus a constant offset.  */
      plus = XEXP (src, 0);

      if (GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;
    }

  return 1;
}
2427
2428 /* Construct a JR instruction to a routine that will perform the equivalent of
2429 the RTL passed in as an argument. This RTL is a function epilogue that
2430 pops registers off the stack and possibly releases some extra stack space
2431 as well. The code has already verified that the RTL matches these
2432 requirements. */
char *
construct_restore_jr (op)
     rtx op;
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid epilogue pattern has a return, a stack adjustment and at
     least one register pop.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d\n", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  The stack adjustment is element 1 of the PARALLEL:
     (set (reg 3) (plus (reg 3) (const_int N))).  */
  if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
    abort ();
  if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
    abort ();

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack...  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping is either 0 or 16 bytes;
     these are the only layouts the __return_* library routines handle.  */
  if (stack_bytes != 0 && stack_bytes != 16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to pop.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	abort ();
      if (GET_CODE (SET_DEST (vector_element)) != REG)
	abort ();
      if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
	abort ();

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  if (first >= 32)
    abort ();

  /* Discover the last register to pop.  The 16 extra bytes of stack
     can only be released by the variant that also restores the link
     pointer (r31).  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      if (stack_bytes != 16)
	abort ();

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      if (stack_bytes != 0)
	abort ();

      if ((mask & (1 << 29)) == 0)
	abort ();

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* The __return_* routine is out of range of a JR; load its
	 address into r6 and jump through it instead.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2544
2545
2546 /* Return nonzero if the given RTX is suitable for collapsing into
2547 a jump to a function prologue. */
int
pattern_is_ok_for_prologue (op, mode)
     rtx op;
     enum machine_mode ATTRIBUTE_UNUSED mode;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx vector_element;

  /* If there are no registers to save then the function prologue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes, except for the last entry which should be a CLOBBER of r10.

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (mem:SI (plus:SI (reg:SI 3)
	  (match_operand:SI 2 "immediate_operand" "i")))
	  (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  for (i = 2; i < count - 1; i++)
    {
      rtx dest;
      rtx src;
      rtx plus;

      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must store a saveable SImode register to memory.  */
      if (GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode))
	return 0;

      /* The store address must be stack pointer plus a constant offset.  */
      plus = XEXP (dest, 0);

      if ( GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.	 */
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	{
	  return 0;
	}
    }

  /* Make sure that the last entry in the vector is a clobber.  */
  vector_element = XVECEXP (op, 0, i);

  if (GET_CODE (vector_element) != CLOBBER
      || GET_CODE (XEXP (vector_element, 0)) != REG
      || REGNO (XEXP (vector_element, 0)) != 10)
    return 0;

  return 1;
}
2626
2627 /* Construct a JARL instruction to a routine that will perform the equivalent
2628 of the RTL passed as a parameter. This RTL is a function prologue that
2629 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2630 some stack space as well. The code has already verified that the RTL
2631 matches these requirements. */
char *
construct_save_jarl (op)
     rtx op;
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid prologue pattern has a stack adjustment, at least one
     register push and a final CLOBBER of r10.  */
  if (count <= 2)
    {
      error ("bogus JARL construction: %d\n", count);
      return NULL;
    }

  /* Paranoia.  Element 0 must be the stack adjustment:
     (set (reg 3) (plus (reg 3) (const_int -N))).  */
  if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
    abort ();
  if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) != REG)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
    abort ();

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes onto the stack...  */
  stack_bytes += (count - 2) * 4;

  /* Make sure that the amount of stack space we are acquiring is either
     0 or 16 bytes; these are the only layouts the __save_* library
     routines handle.  (The value is negative: the stack grows down.)  */
  if (stack_bytes != 0 && stack_bytes != -16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - 1; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	abort ();
      if (GET_CODE (SET_SRC (vector_element)) != REG)
	abort ();
      if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
	abort ();

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  if (first >= 32)
    abort ();

  /* Discover the last register to push.  The 16 extra bytes of stack
     can only be acquired by the variant that also saves the link
     pointer (r31).  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      if (stack_bytes != -16)
	abort ();

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      if (stack_bytes != 0)
	abort ();
      if ((mask & (1 << 29)) == 0)
	abort ();

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* The __save_* routine is out of range of a JARL; load its address
	 into r11, compute the return address in r10 by hand (jarl .+4
	 then add 4) and jump through r11.  */
      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2746
2747 extern tree last_assemble_variable_decl;
2748 extern int size_directive_output;
2749
2750 /* A version of asm_output_aligned_bss() that copes with the special
2751 data areas of the v850. */
2752 void
v850_output_aligned_bss(file,decl,name,size,align)2753 v850_output_aligned_bss (file, decl, name, size, align)
2754 FILE * file;
2755 tree decl;
2756 const char * name;
2757 int size;
2758 int align;
2759 {
2760 (*targetm.asm_out.globalize_label) (file, name);
2761
2762 switch (v850_get_data_area (decl))
2763 {
2764 case DATA_AREA_ZDA:
2765 zbss_section ();
2766 break;
2767
2768 case DATA_AREA_SDA:
2769 sbss_section ();
2770 break;
2771
2772 case DATA_AREA_TDA:
2773 tdata_section ();
2774
2775 default:
2776 bss_section ();
2777 break;
2778 }
2779
2780 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2781 #ifdef ASM_DECLARE_OBJECT_NAME
2782 last_assemble_variable_decl = decl;
2783 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2784 #else
2785 /* Standard thing is just output label for the object. */
2786 ASM_OUTPUT_LABEL (file, name);
2787 #endif /* ASM_DECLARE_OBJECT_NAME */
2788 ASM_OUTPUT_SKIP (file, size ? size : 1);
2789 }
2790
2791 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2792 void
v850_output_common(file,decl,name,size,align)2793 v850_output_common (file, decl, name, size, align)
2794 FILE * file;
2795 tree decl;
2796 const char * name;
2797 int size;
2798 int align;
2799 {
2800 if (decl == NULL_TREE)
2801 {
2802 fprintf (file, "%s", COMMON_ASM_OP);
2803 }
2804 else
2805 {
2806 switch (v850_get_data_area (decl))
2807 {
2808 case DATA_AREA_ZDA:
2809 fprintf (file, "%s", ZCOMMON_ASM_OP);
2810 break;
2811
2812 case DATA_AREA_SDA:
2813 fprintf (file, "%s", SCOMMON_ASM_OP);
2814 break;
2815
2816 case DATA_AREA_TDA:
2817 fprintf (file, "%s", TCOMMON_ASM_OP);
2818 break;
2819
2820 default:
2821 fprintf (file, "%s", COMMON_ASM_OP);
2822 break;
2823 }
2824 }
2825
2826 assemble_name (file, name);
2827 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2828 }
2829
2830 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2831 void
v850_output_local(file,decl,name,size,align)2832 v850_output_local (file, decl, name, size, align)
2833 FILE * file;
2834 tree decl;
2835 const char * name;
2836 int size;
2837 int align;
2838 {
2839 fprintf (file, "%s", LOCAL_ASM_OP);
2840 assemble_name (file, name);
2841 fprintf (file, "\n");
2842
2843 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2844 }
2845
2846 /* Add data area to the given declaration if a ghs data area pragma is
2847 currently in effect (#pragma ghs startXXX/endXXX). */
static void
v850_insert_attributes (decl, attr_ptr)
     tree decl;
     tree *attr_ptr ATTRIBUTE_UNUSED;
{
  /* If a ghs data area pragma is active, and this is a file-scope data
     declaration without an explicit data area of its own, adopt the
     pragma's data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* For file-scope definitions (not external declarations) without an
     explicit section name, work out which GHS section kind they belong
     to and attach the corresponding section name, if any.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      abort ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2945
2946 /* Return nonzero if the given RTX is suitable
2947 for collapsing into a DISPOSE instruction. */
2948
int
pattern_is_ok_for_dispose (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then
     the dispose instruction is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
	  (mem:SI (plus:SI (reg:SI 3)
	    (match_operand:SI n "immediate_operand" "i"))))
     */

  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must load a restorable SImode register from memory.  */
      if (   GET_CODE (dest) != REG
	  || GET_MODE (dest) != SImode
	  || ! register_is_ok_for_epilogue (dest, SImode)
	  || GET_CODE (src) != MEM
	  || GET_MODE (src) != SImode)
	return 0;

      /* The load address must be stack pointer plus a constant offset.  */
      plus = XEXP (src, 0);

      if (   GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO    (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;
    }

  return 1;
}
3008
3009 /* Construct a DISPOSE instruction that is the equivalent of
3010 the given RTX. We have already verified that this should
3011 be possible. */
3012
3013 char *
construct_dispose_instruction(op)3014 construct_dispose_instruction (op)
3015 rtx op;
3016 {
3017 int count = XVECLEN (op, 0);
3018 int stack_bytes;
3019 unsigned long int mask;
3020 int i;
3021 static char buff[ 100 ]; /* XXX */
3022 int use_callt = 0;
3023
3024 if (count <= 2)
3025 {
3026 error ("Bogus DISPOSE construction: %d\n", count);
3027 return NULL;
3028 }
3029
3030 /* Work out how many bytes to pop off the
3031 stack before retrieving registers. */
3032 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
3033 abort ();
3034 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
3035 abort ();
3036 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
3037 abort ();
3038
3039 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
3040
3041 /* Each pop will remove 4 bytes from the stack... */
3042 stack_bytes -= (count - 2) * 4;
3043
3044 /* Make sure that the amount we are popping
3045 will fit into the DISPOSE instruction. */
3046 if (stack_bytes > 128)
3047 {
3048 error ("Too much stack space to dispose of: %d", stack_bytes);
3049 return NULL;
3050 }
3051
3052 /* Now compute the bit mask of registers to push. */
3053 mask = 0;
3054
3055 for (i = 2; i < count; i++)
3056 {
3057 rtx vector_element = XVECEXP (op, 0, i);
3058
3059 if (GET_CODE (vector_element) != SET)
3060 abort ();
3061 if (GET_CODE (SET_DEST (vector_element)) != REG)
3062 abort ();
3063 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
3064 abort ();
3065
3066 if (REGNO (SET_DEST (vector_element)) == 2)
3067 use_callt = 1;
3068 else
3069 mask |= 1 << REGNO (SET_DEST (vector_element));
3070 }
3071
3072 if (! TARGET_DISABLE_CALLT
3073 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
3074 {
3075 if (use_callt)
3076 {
3077 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
3078 return buff;
3079 }
3080 else
3081 {
3082 for (i = 20; i < 32; i++)
3083 if (mask & (1 << i))
3084 break;
3085
3086 if (i == 31)
3087 sprintf (buff, "callt ctoff(__callt_return_r31c)");
3088 else
3089 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
3090 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3091 }
3092 }
3093 else
3094 {
3095 static char regs [100]; /* XXX */
3096 int done_one;
3097
3098 /* Generate the DISPOSE instruction. Note we could just issue the
3099 bit mask as a number as the assembler can cope with this, but for
3100 the sake of our readers we turn it into a textual description. */
3101 regs[0] = 0;
3102 done_one = 0;
3103
3104 for (i = 20; i < 32; i++)
3105 {
3106 if (mask & (1 << i))
3107 {
3108 int first;
3109
3110 if (done_one)
3111 strcat (regs, ", ");
3112 else
3113 done_one = 1;
3114
3115 first = i;
3116 strcat (regs, reg_names[ first ]);
3117
3118 for (i++; i < 32; i++)
3119 if ((mask & (1 << i)) == 0)
3120 break;
3121
3122 if (i > first + 1)
3123 {
3124 strcat (regs, " - ");
3125 strcat (regs, reg_names[ i - 1 ] );
3126 }
3127 }
3128 }
3129
3130 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
3131 }
3132
3133 return buff;
3134 }
3135
3136 /* Return nonzero if the given RTX is suitable
3137 for collapsing into a PREPARE instruction. */
3138
int
pattern_is_ok_for_prepare (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the prepare instruction
     is not suitable.  */
  if (count <= 1)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
       (match_operand:SI 2 "immediate_operand" "i")))
         (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must store a saveable SImode register to memory.  */
      if (   GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode)
	     )
	return 0;

      /* The store address must be stack pointer plus a constant offset.  */
      plus = XEXP (dest, 0);

      if (   GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO    (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.  */
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	return 0;
    }

  return 1;
}
3206
3207 /* Construct a PREPARE instruction that is the equivalent of
3208 the given RTL. We have already verified that this should
3209 be possible. */
3210
3211 char *
construct_prepare_instruction(op)3212 construct_prepare_instruction (op)
3213 rtx op;
3214 {
3215 int count = XVECLEN (op, 0);
3216 int stack_bytes;
3217 unsigned long int mask;
3218 int i;
3219 static char buff[ 100 ]; /* XXX */
3220 int use_callt = 0;
3221
3222 if (count <= 1)
3223 {
3224 error ("Bogus PREPEARE construction: %d\n", count);
3225 return NULL;
3226 }
3227
3228 /* Work out how many bytes to push onto
3229 the stack after storing the registers. */
3230 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3231 abort ();
3232 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
3233 abort ();
3234 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
3235 abort ();
3236
3237 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
3238
3239 /* Each push will put 4 bytes from the stack. */
3240 stack_bytes += (count - 1) * 4;
3241
3242 /* Make sure that the amount we are popping
3243 will fit into the DISPOSE instruction. */
3244 if (stack_bytes < -128)
3245 {
3246 error ("Too much stack space to prepare: %d", stack_bytes);
3247 return NULL;
3248 }
3249
3250 /* Now compute the bit mask of registers to push. */
3251 mask = 0;
3252 for (i = 1; i < count; i++)
3253 {
3254 rtx vector_element = XVECEXP (op, 0, i);
3255
3256 if (GET_CODE (vector_element) != SET)
3257 abort ();
3258 if (GET_CODE (SET_SRC (vector_element)) != REG)
3259 abort ();
3260 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
3261 abort ();
3262
3263 if (REGNO (SET_SRC (vector_element)) == 2)
3264 use_callt = 1;
3265 else
3266 mask |= 1 << REGNO (SET_SRC (vector_element));
3267 }
3268
3269 if ((! TARGET_DISABLE_CALLT)
3270 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
3271 {
3272 if (use_callt)
3273 {
3274 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
3275 return buff;
3276 }
3277
3278 for (i = 20; i < 32; i++)
3279 if (mask & (1 << i))
3280 break;
3281
3282 if (i == 31)
3283 sprintf (buff, "callt ctoff(__callt_save_r31c)");
3284 else
3285 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
3286 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3287 }
3288 else
3289 {
3290 static char regs [100]; /* XXX */
3291 int done_one;
3292
3293
3294 /* Generate the PREPARE instruction. Note we could just issue the
3295 bit mask as a number as the assembler can cope with this, but for
3296 the sake of our readers we turn it into a textual description. */
3297 regs[0] = 0;
3298 done_one = 0;
3299
3300 for (i = 20; i < 32; i++)
3301 {
3302 if (mask & (1 << i))
3303 {
3304 int first;
3305
3306 if (done_one)
3307 strcat (regs, ", ");
3308 else
3309 done_one = 1;
3310
3311 first = i;
3312 strcat (regs, reg_names[ first ]);
3313
3314 for (i++; i < 32; i++)
3315 if ((mask & (1 << i)) == 0)
3316 break;
3317
3318 if (i > first + 1)
3319 {
3320 strcat (regs, " - ");
3321 strcat (regs, reg_names[ i - 1 ] );
3322 }
3323 }
3324 }
3325
3326 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
3327 }
3328
3329 return buff;
3330 }
3331
3332 /* Implement `va_arg'. */
3333
rtx
v850_va_arg (valist, type)
     tree valist, type;
{
  HOST_WIDE_INT size, rsize;
  tree addr, incr;
  rtx addr_rtx;
  int indirect;

  /* Round up sizeof(type) to a word.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
  indirect = 0;

  /* Arguments larger than 8 bytes are passed by reference; what sits
     in the argument area is a pointer to the actual value.  */
  if (size > 8)
    {
      size = rsize = UNITS_PER_WORD;
      indirect = 1;
    }

  /* Advance the va_list pointer past this argument's slot, as a
     side effect, and remember the slot's original address.  */
  addr = save_expr (valist);
  incr = fold (build (PLUS_EXPR, ptr_type_node, addr,
		      build_int_2 (rsize, 0)));

  incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);

  /* For by-reference arguments, dereference the stored pointer.  */
  if (indirect)
    {
      addr_rtx = force_reg (Pmode, addr_rtx);
      addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
    }

  return addr_rtx;
}
3373
3374 /* Return an RTX indicating where the return address to the
3375 calling function can be found. */
3376
3377 rtx
v850_return_addr(count)3378 v850_return_addr (count)
3379 int count;
3380 {
3381 if (count != 0)
3382 return const0_rtx;
3383
3384 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
3385 }
3386
3387 static void
v850_select_section(exp,reloc,align)3388 v850_select_section (exp, reloc, align)
3389 tree exp;
3390 int reloc ATTRIBUTE_UNUSED;
3391 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
3392 {
3393 if (TREE_CODE (exp) == VAR_DECL)
3394 {
3395 int is_const;
3396 if (!TREE_READONLY (exp)
3397 || TREE_SIDE_EFFECTS (exp)
3398 || !DECL_INITIAL (exp)
3399 || (DECL_INITIAL (exp) != error_mark_node
3400 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3401 is_const = FALSE;
3402 else
3403 is_const = TRUE;
3404
3405 switch (v850_get_data_area (exp))
3406 {
3407 case DATA_AREA_ZDA:
3408 if (is_const)
3409 rozdata_section ();
3410 else
3411 zdata_section ();
3412 break;
3413
3414 case DATA_AREA_TDA:
3415 tdata_section ();
3416 break;
3417
3418 case DATA_AREA_SDA:
3419 if (is_const)
3420 rosdata_section ();
3421 else
3422 sdata_section ();
3423 break;
3424
3425 default:
3426 if (is_const)
3427 readonly_data_section ();
3428 else
3429 data_section ();
3430 break;
3431 }
3432 }
3433 else if (TREE_CODE (exp) == STRING_CST)
3434 {
3435 if (! flag_writable_strings)
3436 readonly_data_section ();
3437 else
3438 data_section ();
3439 }
3440 else
3441 readonly_data_section ();
3442 }
3443