1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "integrate.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
51 /* Function prototypes for stupid compilers: */
52 static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
53 static int const_costs_int (HOST_WIDE_INT, int);
54 static int const_costs (rtx, enum rtx_code);
55 static bool v850_rtx_costs (rtx, int, int, int *);
56 static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
57 static void v850_reorg (void);
58 static int ep_memory_offset (enum machine_mode, int);
59 static void v850_set_data_area (tree, v850_data_area);
60 const struct attribute_spec v850_attribute_table[];
61 static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
62 static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
63 static void v850_insert_attributes (tree, tree *);
64 static void v850_select_section (tree, int, unsigned HOST_WIDE_INT);
65 static void v850_encode_data_area (tree, rtx);
66 static void v850_encode_section_info (tree, rtx, int);
67
/* Information about the various small memory areas.  For each area,
   "value" holds the text of any -m{tda,sda,zda}=nnn command line switch
   (parsed by override_options), "max" the user-selected byte limit, and
   the last field the hardware limit for that area.  */
struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
{
  /* name	value		max	physical max */
  { "tda",	(char *)0,	0,	256 },
  { "sda",	(char *)0,	0,	65536 },
  { "zda",	(char *)0,	0,	32768 },
};

/* Names of the various data areas used on the v850, indexed by GHS
   section kind; "default" vs. "current" reflect pragma overrides.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Whether current function is an interrupt handler.
   Only valid once v850_interrupt_cache_p is set.  */
static int v850_interrupt_p = FALSE;
91
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one hook in the default TARGET_INITIALIZER.  */

/* The v850 assembler spells aligned 16 bit data ".hword".  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

/* Section selection honours the tda/sda/zda data areas.  */
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs
/* All addressing modes cost the same on the v850.  */
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

/* Machine dependent reorg implements the -mep pointer optimization.  */
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

struct gcc_target targetm = TARGET_INITIALIZER;
120
121 /* Sometimes certain combinations of command options do not make
122 sense on a particular target machine. You can define a macro
123 `OVERRIDE_OPTIONS' to take account of this. This macro, if
124 defined, is executed once just after all the command options have
125 been parsed.
126
127 Don't use this macro to turn on various extra optimizations for
128 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
129
130 void
override_options(void)131 override_options (void)
132 {
133 int i;
134 extern int atoi (const char *);
135
136 /* Parse -m{s,t,z}da=nnn switches */
137 for (i = 0; i < (int)SMALL_MEMORY_max; i++)
138 {
139 if (small_memory[i].value)
140 {
141 if (!ISDIGIT (*small_memory[i].value))
142 error ("%s=%s is not numeric",
143 small_memory[i].name,
144 small_memory[i].value);
145 else
146 {
147 small_memory[i].max = atoi (small_memory[i].value);
148 if (small_memory[i].max > small_memory[i].physical_max)
149 error ("%s=%s is too large",
150 small_memory[i].name,
151 small_memory[i].value);
152 }
153 }
154 }
155
156 /* Make sure that the US_BIT_SET mask has been correctly initialized. */
157 if ((target_flags & MASK_US_MASK_SET) == 0)
158 {
159 target_flags |= MASK_US_MASK_SET;
160 target_flags &= ~MASK_US_BIT_SET;
161 }
162 }
163
164
165
166 /* Return an RTX to represent where a value with mode MODE will be returned
167 from a function. If the result is 0, the argument is pushed. */
168
169 rtx
function_arg(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named)170 function_arg (CUMULATIVE_ARGS * cum,
171 enum machine_mode mode,
172 tree type,
173 int named)
174 {
175 rtx result = 0;
176 int size, align;
177
178 if (TARGET_GHS && !named)
179 return NULL_RTX;
180
181 if (mode == BLKmode)
182 size = int_size_in_bytes (type);
183 else
184 size = GET_MODE_SIZE (mode);
185
186 if (size < 1)
187 return 0;
188
189 if (type)
190 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
191 else
192 align = size;
193
194 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
195
196 if (cum->nbytes > 4 * UNITS_PER_WORD)
197 return 0;
198
199 if (type == NULL_TREE
200 && cum->nbytes + size > 4 * UNITS_PER_WORD)
201 return 0;
202
203 switch (cum->nbytes / UNITS_PER_WORD)
204 {
205 case 0:
206 result = gen_rtx_REG (mode, 6);
207 break;
208 case 1:
209 result = gen_rtx_REG (mode, 7);
210 break;
211 case 2:
212 result = gen_rtx_REG (mode, 8);
213 break;
214 case 3:
215 result = gen_rtx_REG (mode, 9);
216 break;
217 default:
218 result = 0;
219 }
220
221 return result;
222 }
223
224
225 /* Return the number of words which must be put into registers
226 for values which are part in registers and part in memory. */
227
228 int
function_arg_partial_nregs(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named)229 function_arg_partial_nregs (CUMULATIVE_ARGS * cum,
230 enum machine_mode mode,
231 tree type,
232 int named)
233 {
234 int size, align;
235
236 if (TARGET_GHS && !named)
237 return 0;
238
239 if (mode == BLKmode)
240 size = int_size_in_bytes (type);
241 else
242 size = GET_MODE_SIZE (mode);
243
244 if (type)
245 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
246 else
247 align = size;
248
249 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
250
251 if (cum->nbytes > 4 * UNITS_PER_WORD)
252 return 0;
253
254 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
255 return 0;
256
257 if (type == NULL_TREE
258 && cum->nbytes + size > 4 * UNITS_PER_WORD)
259 return 0;
260
261 return (4 * UNITS_PER_WORD - cum->nbytes) / UNITS_PER_WORD;
262 }
263
264
/* Split CONST_DOUBLE X into its high and low words, stored through
   P_HIGH and P_LOW.  Handles DFmode, SFmode (whose single word goes in
   *P_HIGH and *P_LOW is zeroed) and wide integer constants
   (VOIDmode/DImode).  Anything else is a fatal error.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Wide integer CONST_DOUBLEs carry VOIDmode; just copy the
	     two halves out directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
303
304
305 /* Return the cost of the rtx R with code CODE. */
306
307 static int
const_costs_int(HOST_WIDE_INT value,int zero_cost)308 const_costs_int (HOST_WIDE_INT value, int zero_cost)
309 {
310 if (CONST_OK_FOR_I (value))
311 return zero_cost;
312 else if (CONST_OK_FOR_J (value))
313 return 1;
314 else if (CONST_OK_FOR_K (value))
315 return 2;
316 else
317 return 4;
318 }
319
320 static int
const_costs(rtx r,enum rtx_code c)321 const_costs (rtx r, enum rtx_code c)
322 {
323 HOST_WIDE_INT high, low;
324
325 switch (c)
326 {
327 case CONST_INT:
328 return const_costs_int (INTVAL (r), 0);
329
330 case CONST_DOUBLE:
331 const_double_split (r, &high, &low);
332 if (GET_MODE (r) == SFmode)
333 return const_costs_int (high, 1);
334 else
335 return const_costs_int (high, 1) + const_costs_int (low, 1);
336
337 case SYMBOL_REF:
338 case LABEL_REF:
339 case CONST:
340 return 2;
341
342 case HIGH:
343 return 1;
344
345 default:
346 return 4;
347 }
348 }
349
350 static bool
v850_rtx_costs(rtx x,int code,int outer_code ATTRIBUTE_UNUSED,int * total)351 v850_rtx_costs (rtx x,
352 int code,
353 int outer_code ATTRIBUTE_UNUSED,
354 int * total)
355 {
356 switch (code)
357 {
358 case CONST_INT:
359 case CONST_DOUBLE:
360 case CONST:
361 case SYMBOL_REF:
362 case LABEL_REF:
363 *total = COSTS_N_INSNS (const_costs (x, code));
364 return true;
365
366 case MOD:
367 case DIV:
368 case UMOD:
369 case UDIV:
370 if (TARGET_V850E && optimize_size)
371 *total = 6;
372 else
373 *total = 60;
374 return true;
375
376 case MULT:
377 if (TARGET_V850E
378 && ( GET_MODE (x) == SImode
379 || GET_MODE (x) == HImode
380 || GET_MODE (x) == QImode))
381 {
382 if (GET_CODE (XEXP (x, 1)) == REG)
383 *total = 4;
384 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
385 {
386 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
387 *total = 6;
388 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
389 *total = 10;
390 }
391 }
392 else
393 *total = 20;
394 return true;
395
396 default:
397 return false;
398 }
399 }
400
/* Print operand X using operand code CODE to assembly language output file
   FILE.  Codes:
     b/c	condition name for a comparison ('c' also accepts a
		SYMBOL_REF, used for .vtinherit)
     B/C	like b/c but with the condition reversed
     F/G	high/low word of a CONST_DOUBLE (or sign-extended CONST_INT)
     L		low 16 bits of a constant
     M		log2 of a power-of-two mask
     O/P/Q	offset name / symbol / base register of a tda/sda/zda ref
     R		second word of a double register or memory operand
     S/T	"s" prefix when a short (sld/sst) EP reference is possible
		(T = unsigned loads only)
     W		mode suffix (.b/.h/.w)
     .		register r0
     z		register, or r0 for the constant zero.  */

void
print_operand (FILE * file, rtx x, int code)
{
  HOST_WIDE_INT high, low;

  switch (code)
    {
    case 'c':
      /* We use 'c' operands with symbols for .vtinherit */
      if (GET_CODE (x) == SYMBOL_REF)
        {
          output_addr_const(file, x);
          break;
        }
      /* fall through */
    case 'b':
    case 'B':
    case 'C':
      /* Emit the v850 condition name; 'B' and 'C' reverse the sense.  */
      switch ((code == 'B' || code == 'C')
	      ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
	{
	  case NE:
	    if (code == 'c' || code == 'C')
	      fprintf (file, "nz");
	    else
	      fprintf (file, "ne");
	    break;
	  case EQ:
	    if (code == 'c' || code == 'C')
	      fprintf (file, "z");
	    else
	      fprintf (file, "e");
	    break;
	  case GE:
	    fprintf (file, "ge");
	    break;
	  case GT:
	    fprintf (file, "gt");
	    break;
	  case LE:
	    fprintf (file, "le");
	    break;
	  case LT:
	    fprintf (file, "lt");
	    break;
	  case GEU:
	    fprintf (file, "nl");
	    break;
	  case GTU:
	    fprintf (file, "h");
	    break;
	  case LEU:
	    fprintf (file, "nh");
	    break;
	  case LTU:
	    fprintf (file, "l");
	    break;
	  default:
	    abort ();
	}
      break;
    case 'F':			/* high word of CONST_DOUBLE */
      if (GET_CODE (x) == CONST_INT)
	/* A CONST_INT is implicitly sign-extended: high word is 0 or -1.  */
	fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
      else if (GET_CODE (x) == CONST_DOUBLE)
	{
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) high);
	}
      else
	abort ();
      break;
    case 'G':			/* low word of CONST_DOUBLE */
      if (GET_CODE (x) == CONST_INT)
	fprintf (file, "%ld", (long) INTVAL (x));
      else if (GET_CODE (x) == CONST_DOUBLE)
	{
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) low);
	}
      else
	abort ();
      break;
    case 'L':
      /* Low 16 bits of a constant.  NOTE(review): the trailing \n in the
	 format looks odd for an operand fragment — confirm against the
	 .md templates that use %L.  */
      fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
      break;
    case 'M':
      /* log2 of a power-of-two mask, e.g. for shift counts.  */
      fprintf (file, "%d", exact_log2 (INTVAL (x)));
      break;
    case 'O':
      /* Name of the small data area offset relocation for X.  */
      if (special_symbolref_operand (x, VOIDmode))
        {
	  /* Strip a (const (plus sym k)) wrapper down to the symbol.  */
          if (GET_CODE (x) == SYMBOL_REF)
            ;
          else if (GET_CODE (x) == CONST)
            x = XEXP (XEXP (x, 0), 0);
          else
            abort ();

          if (SYMBOL_REF_ZDA_P (x))
            fprintf (file, "zdaoff");
          else if (SYMBOL_REF_SDA_P (x))
            fprintf (file, "sdaoff");
          else if (SYMBOL_REF_TDA_P (x))
            fprintf (file, "tdaoff");
          else
            abort ();
        }
      else
        abort ();
      break;
    case 'P':
      /* The symbol itself, for use inside an %O relocation.  */
      if (special_symbolref_operand (x, VOIDmode))
        output_addr_const (file, x);
      else
        abort ();
      break;
    case 'Q':
      /* Base register implied by the data area of symbol X.  */
      if (special_symbolref_operand (x, VOIDmode))
        {
          if (GET_CODE (x) == SYMBOL_REF)
            ;
          else if (GET_CODE (x) == CONST)
            x = XEXP (XEXP (x, 0), 0);
          else
            abort ();

          if (SYMBOL_REF_ZDA_P (x))
            fprintf (file, "r0");
          else if (SYMBOL_REF_SDA_P (x))
            fprintf (file, "gp");
          else if (SYMBOL_REF_TDA_P (x))
            fprintf (file, "ep");
          else
            abort ();
        }
      else
        abort ();
      break;
    case 'R':		/* 2nd word of a double. */
      switch (GET_CODE (x))
        {
	  case REG:
	    /* NOTE(review): non-literal fprintf format — harmless for
	       register names, but fputs would be cleaner.  */
	    fprintf (file, reg_names[REGNO (x) + 1]);
	    break;
	  case MEM:
	    /* Address of the second word, with an explicit [r0] base for
	       a bare constant address.  */
	    x = XEXP (adjust_address (x, SImode, 4), 0);
	    print_operand_address (file, x);
	    if (GET_CODE (x) == CONST_INT)
	      fprintf (file, "[r0]");
	    break;

	  default:
	    break;
        }
      break;
    case 'S':
      {
        /* if it's a reference to a TDA variable, use sst/sld vs. st/ld */
        if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
          fputs ("s", file);

        break;
      }
    case 'T':
      {
	/* Like an 'S' operand above, but for unsigned loads only.  */
        if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
          fputs ("s", file);

        break;
      }
    case 'W':			/* print the instruction suffix */
      switch (GET_MODE (x))
	{
	default:
	  abort ();

	case QImode: fputs (".b", file); break;
	case HImode: fputs (".h", file); break;
	case SImode: fputs (".w", file); break;
	case SFmode: fputs (".w", file); break;
	}
      break;
    case '.':			/* register r0 */
      fputs (reg_names[0], file);
      break;
    case 'z':			/* reg or zero */
      if (x == const0_rtx)
	fputs (reg_names[0], file);
      else if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)], file);
      else
	abort ();
      break;
    default:
      /* No code: print the operand itself.  */
      switch (GET_CODE (x))
	{
	case MEM:
	  /* A constant address is printed with an explicit r0 base.  */
	  if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	    output_address (gen_rtx_PLUS (SImode, gen_rtx (REG, SImode, 0),
					  XEXP (x, 0)));
	  else
	    output_address (XEXP (x, 0));
	  break;

	case REG:
	  fputs (reg_names[REGNO (x)], file);
	  break;
	case SUBREG:
	  fputs (reg_names[subreg_regno (x)], file);
	  break;
	case CONST_INT:
	case SYMBOL_REF:
	case CONST:
	case LABEL_REF:
	case CODE_LABEL:
	  print_operand_address (file, x);
	  break;
	default:
	  abort ();
	}
      break;

    }
}
630
631
/* Output assembly language output for the address ADDR to FILE.
   Addresses are printed in the v850 "offset[base]" form; symbols in one
   of the small data areas get an explicit reloc operator and base
   register, e.g. "sdaoff(sym)[gp]".  */

void
print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register base: zero offset.  */
      fprintf (file, "0[");
      print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      /* NOTE(review): a LO_SUM whose first operand is not a REG prints
	 nothing at all — confirm such addresses cannot reach here.  */
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  /* Symbolic sum: print as "a+b".  */
	  print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
        const char *off_name = NULL;
        const char *reg_name = NULL;

	/* Select the reloc operator and base register matching the
	   symbol's data area, if any.  */
	if (SYMBOL_REF_ZDA_P (addr))
          {
            off_name = "zdaoff";
            reg_name = "r0";
          }
        else if (SYMBOL_REF_SDA_P (addr))
          {
            off_name = "sdaoff";
            reg_name = "gp";
          }
        else if (SYMBOL_REF_TDA_P (addr))
          {
            off_name = "tdaoff";
            reg_name = "ep";
          }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
        {
	  /* (const (plus sym k)): classify the underlying symbol.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
            {
              off_name = "zdaoff";
              reg_name = "r0";
            }
          else if (SYMBOL_REF_SDA_P (x))
            {
              off_name = "sdaoff";
              reg_name = "gp";
            }
          else if (SYMBOL_REF_TDA_P (x))
            {
              off_name = "tdaoff";
              reg_name = "ep";
            }
          else
            abort ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
        }
      else
        output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
737
738 /* When assemble_integer is used to emit the offsets for a switch
739 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
740 output_addr_const will normally barf at this, but it is OK to omit
741 the truncate and just emit the difference of the two labels. The
742 .hword directive will automatically handle the truncation for us.
743
744 Returns 1 if rtx was handled, 0 otherwise. */
745
746 int
v850_output_addr_const_extra(FILE * file,rtx x)747 v850_output_addr_const_extra (FILE * file, rtx x)
748 {
749 if (GET_CODE (x) != TRUNCATE)
750 return 0;
751
752 x = XEXP (x, 0);
753
754 /* We must also handle the case where the switch table was passed a
755 constant value and so has been collapsed. In this case the first
756 label will have been deleted. In such a case it is OK to emit
757 nothing, since the table will not be used.
758 (cf gcc.c-torture/compile/990801-1.c). */
759 if (GET_CODE (x) == MINUS
760 && GET_CODE (XEXP (x, 0)) == LABEL_REF
761 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
762 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
763 return 1;
764
765 output_addr_const (file, x);
766 return 1;
767 }
768
769 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
770 point value. */
771
const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5 bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16 bit immediate.  */
	    return "movea lo(%1),%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi(%1),%.,%0";

	  /* A random constant.  The v850e has a 32 bit mov; older parts
	     need a movhi/movea pair.  */
	  else if (TARGET_V850E)
	    return "mov %1,%0";
          else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant is a single word, held in HIGH.  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5 bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16 bit immediate.  */
	    return "movea lo(%F1),%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi(%F1),%.,%0";

	  /* A random constant.  */
          else if (TARGET_V850E)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 selects the short "sld" form for EP references.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	/* Small-area symbol: one movea off the area's base register.  */
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  /* Expose the LO_SUM operands as %2/%3 for the template.  */
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store zero straight from r0 (%.).  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
869
870
871 /* Return appropriate code to load up an 8 byte integer or
872 floating point value */
873
const char *
output_move_double (rtx * operands)
{
  enum machine_mode mode = GET_MODE (operands[0]);
  rtx dst = operands[0];
  rtx src = operands[1];

  if (register_operand (dst, mode)
      && register_operand (src, mode))
    {
      /* Copy in the order that avoids clobbering an overlapping source
	 register pair.  */
      if (REGNO (src) + 1 == REGNO (dst))
	return "mov %R1,%R0\n\tmov %1,%0";
      else
	return "mov %1,%0\n\tmov %R1,%R0";
    }

  /* Storing 0 */
  if (GET_CODE (dst) == MEM
      && ((GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	  || (GET_CODE (src) == CONST_DOUBLE && CONST_DOUBLE_OK_FOR_G (src))))
    return "st.w %.,%0\n\tst.w %.,%R0";

  if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
    {
      HOST_WIDE_INT high_low[2];
      int i;
      rtx xop[10];

      if (GET_CODE (src) == CONST_DOUBLE)
	const_double_split (src, &high_low[1], &high_low[0]);
      else
	{
	  /* Sign-extend a CONST_INT into the high word.  */
	  high_low[0] = INTVAL (src);
	  high_low[1] = (INTVAL (src) >= 0) ? 0 : -1;
	}

      /* Emit each half as an ordinary single word move.  */
      for (i = 0; i < 2; i++)
	{
	  xop[0] = gen_rtx_REG (SImode, REGNO (dst)+i);
	  xop[1] = GEN_INT (high_low[i]);
	  output_asm_insn (output_move_single (xop), xop);
	}

      return "";
    }

  if (GET_CODE (src) == MEM)
    {
      int ptrreg = -1;
      int dreg = REGNO (dst);
      rtx inside = XEXP (src, 0);

      /* Find the base register of the source address, if any.  */
      if (GET_CODE (inside) == REG)
        ptrreg = REGNO (inside);
      else if (GET_CODE (inside) == SUBREG)
	ptrreg = subreg_regno (inside);
      else if (GET_CODE (inside) == PLUS)
	ptrreg = REGNO (XEXP (inside, 0));
      else if (GET_CODE (inside) == LO_SUM)
	ptrreg = REGNO (XEXP (inside, 0));

      /* If the low destination word would clobber the base register,
	 load the high word first.  */
      if (dreg == ptrreg)
	return "ld.w %R1,%R0\n\tld.w %1,%0";
    }

  if (GET_CODE (src) == MEM)
    return "ld.w %1,%0\n\tld.w %R1,%R0";
  
  if (GET_CODE (dst) == MEM)
    return "st.w %1,%0\n\tst.w %R1,%R0";

  return "mov %1,%0\n\tmov %R1,%R0";
}
947
948
949 /* Return maximum offset supported for a short EP memory reference of mode
950 MODE and signedness UNSIGNEDP. */
951
952 static int
ep_memory_offset(enum machine_mode mode,int unsignedp ATTRIBUTE_UNUSED)953 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
954 {
955 int max_offset = 0;
956
957 switch (mode)
958 {
959 case QImode:
960 if (TARGET_SMALL_SLD)
961 max_offset = (1 << 4);
962 else if (TARGET_V850E
963 && ( ( unsignedp && ! TARGET_US_BIT_SET)
964 || (! unsignedp && TARGET_US_BIT_SET)))
965 max_offset = (1 << 4);
966 else
967 max_offset = (1 << 7);
968 break;
969
970 case HImode:
971 if (TARGET_SMALL_SLD)
972 max_offset = (1 << 5);
973 else if (TARGET_V850E
974 && ( ( unsignedp && ! TARGET_US_BIT_SET)
975 || (! unsignedp && TARGET_US_BIT_SET)))
976 max_offset = (1 << 5);
977 else
978 max_offset = (1 << 8);
979 break;
980
981 case SImode:
982 case SFmode:
983 max_offset = (1 << 8);
984 break;
985
986 default:
987 break;
988 }
989
990 return max_offset;
991 }
992
993 /* Return true if OP is a valid short EP memory reference */
994
/* Return true if OP is a MEM that can be encoded as a short EP
   reference of mode MODE (UNSIGNED_LOAD selects the unsigned load
   variant's offset range): either a TDA symbol, the ep register
   itself, or ep/TDA-symbol plus an aligned, in-range offset.  */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be a multiple of the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* TDA symbols are addressed off ep implicitly.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1043
1044 /* Return true if OP is either a register or 0 */
1045
1046 int
reg_or_0_operand(rtx op,enum machine_mode mode)1047 reg_or_0_operand (rtx op, enum machine_mode mode)
1048 {
1049 if (GET_CODE (op) == CONST_INT)
1050 return INTVAL (op) == 0;
1051
1052 else if (GET_CODE (op) == CONST_DOUBLE)
1053 return CONST_DOUBLE_OK_FOR_G (op);
1054
1055 else
1056 return register_operand (op, mode);
1057 }
1058
1059 /* Return true if OP is either a register or a signed five bit integer */
1060
1061 int
reg_or_int5_operand(rtx op,enum machine_mode mode)1062 reg_or_int5_operand (rtx op, enum machine_mode mode)
1063 {
1064 if (GET_CODE (op) == CONST_INT)
1065 return CONST_OK_FOR_J (INTVAL (op));
1066
1067 else
1068 return register_operand (op, mode);
1069 }
1070
1071 /* Return true if OP is either a register or a signed nine bit integer. */
1072
1073 int
reg_or_int9_operand(rtx op,enum machine_mode mode)1074 reg_or_int9_operand (rtx op, enum machine_mode mode)
1075 {
1076 if (GET_CODE (op) == CONST_INT)
1077 return CONST_OK_FOR_O (INTVAL (op));
1078
1079 return register_operand (op, mode);
1080 }
1081
1082 /* Return true if OP is either a register or a const integer. */
1083
1084 int
reg_or_const_operand(rtx op,enum machine_mode mode)1085 reg_or_const_operand (rtx op, enum machine_mode mode)
1086 {
1087 if (GET_CODE (op) == CONST_INT)
1088 return TRUE;
1089
1090 return register_operand (op, mode);
1091 }
1092
1093 /* Return true if OP is a valid call operand. */
1094
1095 int
call_address_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)1096 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1097 {
1098 /* Only registers are valid call operands if TARGET_LONG_CALLS. */
1099 if (TARGET_LONG_CALLS)
1100 return GET_CODE (op) == REG;
1101 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == REG);
1102 }
1103
1104 int
special_symbolref_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)1105 special_symbolref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1106 {
1107 if (GET_CODE (op) == CONST
1108 && GET_CODE (XEXP (op, 0)) == PLUS
1109 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
1110 && CONST_OK_FOR_K (INTVAL (XEXP (XEXP (op, 0), 1))))
1111 op = XEXP (XEXP (op, 0), 0);
1112
1113 if (GET_CODE (op) == SYMBOL_REF)
1114 return (SYMBOL_REF_FLAGS (op)
1115 & (SYMBOL_FLAG_ZDA | SYMBOL_FLAG_TDA | SYMBOL_FLAG_SDA)) != 0;
1116
1117 return FALSE;
1118 }
1119
1120 int
movsi_source_operand(rtx op,enum machine_mode mode)1121 movsi_source_operand (rtx op, enum machine_mode mode)
1122 {
1123 /* Some constants, as well as symbolic operands
1124 must be done with HIGH & LO_SUM patterns. */
1125 if (CONSTANT_P (op)
1126 && GET_CODE (op) != HIGH
1127 && GET_CODE (op) != CONSTANT_P_RTX
1128 && !(GET_CODE (op) == CONST_INT
1129 && (CONST_OK_FOR_J (INTVAL (op))
1130 || CONST_OK_FOR_K (INTVAL (op))
1131 || CONST_OK_FOR_L (INTVAL (op)))))
1132 return special_symbolref_operand (op, mode);
1133 else
1134 return general_operand (op, mode);
1135 }
1136
1137 int
power_of_two_operand(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)1138 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1139 {
1140 if (GET_CODE (op) != CONST_INT)
1141 return 0;
1142
1143 if (exact_log2 (INTVAL (op)) == -1)
1144 return 0;
1145 return 1;
1146 }
1147
1148 int
not_power_of_two_operand(rtx op,enum machine_mode mode)1149 not_power_of_two_operand (rtx op, enum machine_mode mode)
1150 {
1151 unsigned int mask;
1152
1153 if (mode == QImode)
1154 mask = 0xff;
1155 else if (mode == HImode)
1156 mask = 0xffff;
1157 else if (mode == SImode)
1158 mask = 0xffffffff;
1159 else
1160 return 0;
1161
1162 if (GET_CODE (op) != CONST_INT)
1163 return 0;
1164
1165 if (exact_log2 (~INTVAL (op) & mask) == -1)
1166 return 0;
1167 return 1;
1168 }
1169
1170
1171 /* Substitute memory references involving a pointer, to use the ep pointer,
1172 taking care to save and preserve the ep. */
1173
static void
substitute_ep_register (rtx first_insn,
                        rtx last_insn,
                        int uses,
                        int regno,
                        rtx * p_r1,
                        rtx * p_ep)
{
  /* Rewrite memory references based on register REGNO between FIRST_INSN
     and LAST_INSN to use the ep register instead, saving/restoring the
     previous ep value via r1.  *P_R1/*P_EP are lazily created on the
     first call so r1 is only marked live when actually needed.  */
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  if (!*p_r1)
    {
      /* r1 is the assembler temporary; mark it live since we now use it
	 to hold the saved ep.  */
      regs_ever_live[1] = 1;
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Find the single memory operand of the set, if any;
		 mem-to-mem sets are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) becomes (mem ep); (mem (plus
		     (reg REGNO) k)) becomes (mem (plus ep k)) when k is
		     within the short-form offset range.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Save ep in r1, point ep at the pointer register, and restore ep
     after the region.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1276
1277
1278 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1279 the -mep mode to copy heavily used pointers to ep to use the implicit
1280 addressing. */
1281
static void
v850_reorg (void)
{
  /* Per-register tracking for the current basic block: how many short
     memory references used the register as a base address, and the
     first/last insns of that run.  */
  struct
  {
    int uses;
    rtx first_insn;
    rtx last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;
  rtx r1 = NULL_RTX;
  rtx ep = NULL_RTX;
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        /* End of basic block */
        default:
          if (!use_ep)
            {
              /* Find the most heavily used base register in the block
                 just ended and, if worthwhile, rewrite its references
                 to go through ep.  */
              int max_uses = -1;
              int max_regno = -1;

              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                {
                  if (max_uses < regs[i].uses)
                    {
                      max_uses = regs[i].uses;
                      max_regno = i;
                    }
                }

              /* The substitution costs a few insns of setup, so it only
                 pays off for more than three uses.  */
              if (max_uses > 3)
                substitute_ep_register (regs[max_regno].first_insn,
                                        regs[max_regno].last_insn,
                                        max_uses, max_regno, &r1, &ep);
            }

          use_ep = FALSE;
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            {
              regs[i].uses = 0;
              regs[i].first_insn = NULL_RTX;
              regs[i].last_insn = NULL_RTX;
            }
          break;

        case NOTE:
          break;

        case INSN:
          pattern = single_set (insn);

          /* See if there are any memory references we can shorten */
          if (pattern)
            {
              rtx src = SET_SRC (pattern);
              rtx dest = SET_DEST (pattern);
              rtx mem;
              /* Memory operands are signed by default.  */
              int unsignedp = FALSE;

              /* We might have (SUBREG (MEM)) here, so just get rid of the
                 subregs to make this code simpler.  */
              if (GET_CODE (dest) == SUBREG
                  && (GET_CODE (SUBREG_REG (dest)) == MEM
                      || GET_CODE (SUBREG_REG (dest)) == REG))
                alter_subreg (&dest);
              if (GET_CODE (src) == SUBREG
                  && (GET_CODE (SUBREG_REG (src)) == MEM
                      || GET_CODE (SUBREG_REG (src)) == REG))
                alter_subreg (&src);

              /* Pick out the (at most one) MEM operand; memory-to-memory
                 copies are not candidates.  */
              if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
                mem = NULL_RTX;

              else if (GET_CODE (dest) == MEM)
                mem = dest;

              else if (GET_CODE (src) == MEM)
                mem = src;

              else if (GET_CODE (src) == SIGN_EXTEND
                       && GET_CODE (XEXP (src, 0)) == MEM)
                mem = XEXP (src, 0);

              else if (GET_CODE (src) == ZERO_EXTEND
                       && GET_CODE (XEXP (src, 0)) == MEM)
                {
                  mem = XEXP (src, 0);
                  unsignedp = TRUE;
                }
              else
                mem = NULL_RTX;

              /* Already an ep-relative reference: ep is live in this
                 block, so don't try another substitution here.  */
              if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
                use_ep = TRUE;

              else if (!use_ep && mem
                       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
                {
                  rtx addr = XEXP (mem, 0);
                  int regno = -1;
                  int short_p;

                  /* A reference qualifies if the base is a plain register,
                     or register + small positive constant that fits the
                     short ep-relative offset for this mode.  */
                  if (GET_CODE (addr) == REG)
                    {
                      short_p = TRUE;
                      regno = REGNO (addr);
                    }

                  else if (GET_CODE (addr) == PLUS
                           && GET_CODE (XEXP (addr, 0)) == REG
                           && GET_CODE (XEXP (addr, 1)) == CONST_INT
                           && ((INTVAL (XEXP (addr, 1)))
                               < ep_memory_offset (GET_MODE (mem), unsignedp))
                           && ((INTVAL (XEXP (addr, 1))) >= 0))
                    {
                      short_p = TRUE;
                      regno = REGNO (XEXP (addr, 0));
                    }

                  else
                    short_p = FALSE;

                  if (short_p)
                    {
                      regs[regno].uses++;
                      regs[regno].last_insn = insn;
                      if (!regs[regno].first_insn)
                        regs[regno].first_insn = insn;
                    }
                }

              /* Loading up a register in the basic block zaps any savings
                 for the register */
              if (GET_CODE (dest) == REG)
                {
                  enum machine_mode mode = GET_MODE (dest);
                  int regno;
                  int endregno;

                  regno = REGNO (dest);
                  endregno = regno + HARD_REGNO_NREGS (regno, mode);

                  if (!use_ep)
                    {
                      /* See if we can use the pointer before this
                         modification.  */
                      int max_uses = -1;
                      int max_regno = -1;

                      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                        {
                          if (max_uses < regs[i].uses)
                            {
                              max_uses = regs[i].uses;
                              max_regno = i;
                            }
                        }

                      if (max_uses > 3
                          && max_regno >= regno
                          && max_regno < endregno)
                        {
                          substitute_ep_register (regs[max_regno].first_insn,
                                                  regs[max_regno].last_insn,
                                                  max_uses, max_regno, &r1,
                                                  &ep);

                          /* Since we made a substitution, zap all remembered
                             registers.  */
                          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                            {
                              regs[i].uses = 0;
                              regs[i].first_insn = NULL_RTX;
                              regs[i].last_insn = NULL_RTX;
                            }
                        }
                    }

                  /* Forget everything known about the registers this insn
                     overwrites.  */
                  for (i = regno; i < endregno; i++)
                    {
                      regs[i].uses = 0;
                      regs[i].first_insn = NULL_RTX;
                      regs[i].last_insn = NULL_RTX;
                    }
                }
            }
        }
    }
}
1491
1492
1493 /* # of registers saved by the interrupt handler. */
1494 #define INTERRUPT_FIXED_NUM 4
1495
1496 /* # of bytes for registers saved by the interrupt handler. */
1497 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1498
1499 /* # of registers saved in register parameter area. */
1500 #define INTERRUPT_REGPARM_NUM 4
1501 /* # of words saved for other registers. */
1502 #define INTERRUPT_ALL_SAVE_NUM \
1503 (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)
1504
1505 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1506
/* Return the number of bytes needed to save the callee-saved registers
   of the current function, and, if P_REG_SAVED is non-null, store a
   bitmask of the registers that must actually be stored/loaded by the
   prologue/epilogue into *P_REG_SAVED.  Registers that are counted in
   the size but handled specially (e.g. by the interrupt save helpers)
   are not included in the mask.  */
int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = regs_ever_live [LINK_POINTER_REGNUM];
  long reg_saved = 0;

  /* Count the return pointer if we need to save it.  */
  if (current_function_profile && !call_p)
    regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      /* Interrupt handlers must preserve every live register, and every
         register if the handler itself makes calls.  */
      for (i = 0; i <= 31; i++)
        switch (i)
          {
          default:
            if (regs_ever_live[i] || call_p)
              {
                size += 4;
                reg_saved |= 1L << i;
              }
            break;

            /* We don't save/restore r0 or the stack pointer */
          case 0:
          case STACK_POINTER_REGNUM:
            break;

            /* For registers with fixed use, we save them, set them to the
               appropriate value, and then restore them.
               These registers are handled specially, so don't list them
               on the list of registers to save in the prologue.  */
          case 1:			/* temp used to hold ep */
          case 4:			/* gp */
          case 10:			/* temp used to call interrupt save/restore */
          case EP_REGNUM:		/* ep */
            size += 4;
            break;
          }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
        if (regs_ever_live[i] && ((! call_used_regs[i])
                                  || i == LINK_POINTER_REGNUM))
          break;

      /* If it is possible that an out-of-line helper function might be
         used to generate the prologue for the current function, then we
         need to cover the possibility that such a helper function will
         be used, despite the fact that there might be gaps in the list of
         registers that need to be saved.  To detect this we note that the
         helper functions always push at least register r29 (provided
         that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
        {
          if (i == 2)
            {
              size += 4;
              reg_saved |= 1L << i;

              i = 20;
            }

          /* Helper functions save all registers between the starting
             register and the last register, regardless of whether they
             are actually used by the function or not.  */
          for (; i <= 29; i++)
            {
              size += 4;
              reg_saved |= 1L << i;
            }

          if (regs_ever_live [LINK_POINTER_REGNUM])
            {
              size += 4;
              reg_saved |= 1L << LINK_POINTER_REGNUM;
            }
        }
      else
        {
          /* Ordinary case: count each live call-saved register, plus the
             link pointer if it is live.  */
          for (; i <= 31; i++)
            if (regs_ever_live[i] && ((! call_used_regs[i])
                                      || i == LINK_POINTER_REGNUM))
              {
                size += 4;
                reg_saved |= 1L << i;
              }
        }
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1610
1611 int
compute_frame_size(int size,long * p_reg_saved)1612 compute_frame_size (int size, long * p_reg_saved)
1613 {
1614 return (size
1615 + compute_register_save_size (p_reg_saved)
1616 + current_function_outgoing_args_size);
1617 }
1618
1619
/* Emit the RTL for the current function's prologue: save registers
   (preferring a single out-of-line "prologue function" call when it is
   smaller), allocate the stack frame, and set up the frame pointer if
   needed.  */
void
expand_prologue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
        emit_insn (gen_callt_save_interrupt ());
      else
        emit_insn (gen_save_interrupt ());

      /* The interrupt save pattern already accounted for this space.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Save arg registers to the stack if necessary.  */
  else if (current_function_args_info.anonymous_args)
    {
      if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT)
        emit_insn (gen_save_r6_r9_v850e ());
      else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS)
        emit_insn (gen_save_r6_r9 ());
      else
        {
          /* Store r6..r9 one by one.  */
          offset = 0;
          for (i = 6; i < 10; i++)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              gen_rtx_REG (SImode, i));
              offset += 4;
            }
        }
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
        save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in.  */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
    {
      /* Estimate the code size of the out-of-line call versus inline
         saves, and only use the helper when it is actually smaller.  */
      int alloc_stack = (4 * num_save) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int save_func_len = 4;
      int save_normal_len;

      if (unalloc_stack)
        save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* see if we would have used ep to save the stack */
      if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
        save_normal_len = (3 * 2) + (2 * num_save);
      else
        save_normal_len = 4 * num_save;

      save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;

      /* Don't bother checking if we don't actually save any space.
         This happens for instance if one register is saved and additional
         stack space is allocated.  */
      if (save_func_len < save_normal_len)
        {
          /* Build a PARALLEL of the stack adjustment plus one store per
             register, then see if a prologue-function pattern matches.  */
          save_all = gen_rtx_PARALLEL
            (VOIDmode,
             rtvec_alloc (num_save + 1
                          + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

          XVECEXP (save_all, 0, 0)
            = gen_rtx_SET (VOIDmode,
                           stack_pointer_rtx,
                           plus_constant (stack_pointer_rtx, -alloc_stack));

          offset = - default_stack;
          for (i = 0; i < num_save; i++)
            {
              XVECEXP (save_all, 0, i+1)
                = gen_rtx_SET (VOIDmode,
                               gen_rtx_MEM (Pmode,
                                            plus_constant (stack_pointer_rtx,
                                                           offset)),
                               save_regs[i]);
              offset -= 4;
            }

          /* The v850 helper clobbers r10 (and r11 for long calls).  */
          if (TARGET_V850)
            {
              XVECEXP (save_all, 0, num_save + 1)
                = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

              if (TARGET_LONG_CALLS)
                XVECEXP (save_all, 0, num_save + 2)
                  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
            }

          code = recog (save_all, NULL_RTX, NULL);
          if (code >= 0)
            {
              rtx insn = emit_insn (save_all);
              INSN_CODE (insn) = code;
              actual_fsize -= alloc_stack;

              if (TARGET_DEBUG)
                fprintf (stderr, "\
Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
                         save_normal_len - save_func_len,
                         save_normal_len, save_func_len,
                         IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
            }
          else
            save_all = NULL_RTX;
        }
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_save_all_interrupt ());
          else
            emit_insn (gen_save_all_interrupt ());
        }
      else
        {
          /* If the stack is too big, allocate it in chunks so we can do the
             register saves.  We use the register save size so we use the ep
             register.  */
          if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
            init_stack_alloc = compute_register_save_size (NULL);
          else
            init_stack_alloc = actual_fsize;

          /* Save registers at the beginning of the stack frame.  */
          offset = init_stack_alloc - 4;

          if (init_stack_alloc)
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (-init_stack_alloc)));

          /* Save the return pointer first.  */
          if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              save_regs[--num_save]);
              offset -= 4;
            }

          for (i = 0; i < num_save; i++)
            {
              emit_move_insn (gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)),
                              save_regs[i]);
              offset -= 4;
            }
        }
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;
      if (CONST_OK_FOR_K (diff))
        emit_insn (gen_addsi3 (stack_pointer_rtx,
                               stack_pointer_rtx,
                               GEN_INT (-diff)));
      else
        {
          /* The adjustment does not fit an immediate; go through r12.  */
          rtx reg = gen_rtx_REG (Pmode, 12);
          emit_move_insn (reg, GEN_INT (-diff));
          emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
        }
    }

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1840
1841
/* Emit the RTL for the current function's epilogue: restore registers
   (via an out-of-line "epilogue function" jump when that is smaller),
   deallocate the stack frame, and return.  Mirrors expand_prologue.  */
void
expand_epilogue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  unsigned int actual_fsize = compute_frame_size (size, &reg_saved);
  unsigned int init_stack_free = 0;
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
        restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in.  */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && actual_fsize >= default_stack
      && !interrupt_handler)
    {
      /* Estimate out-of-line versus inline restore size; use the helper
         only when it wins.  */
      int alloc_stack = (4 * num_restore) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int restore_func_len = 4;
      int restore_normal_len;

      if (unalloc_stack)
        restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* See if we would have used ep to restore the registers.  */
      if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
        restore_normal_len = (3 * 2) + (2 * num_restore);
      else
        restore_normal_len = 4 * num_restore;

      restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;

      /* Don't bother checking if we don't actually save any space.  */
      if (restore_func_len < restore_normal_len)
        {
          /* Build a PARALLEL of (return, stack adjust, loads) and see if
             an epilogue-function pattern matches it.  */
          restore_all = gen_rtx_PARALLEL (VOIDmode,
                                          rtvec_alloc (num_restore + 2));
          XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
          XVECEXP (restore_all, 0, 1)
            = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                           gen_rtx_PLUS (Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (alloc_stack)));

          offset = alloc_stack - 4;
          for (i = 0; i < num_restore; i++)
            {
              XVECEXP (restore_all, 0, i+2)
                = gen_rtx_SET (VOIDmode,
                               restore_regs[i],
                               gen_rtx_MEM (Pmode,
                                            plus_constant (stack_pointer_rtx,
                                                           offset)));
              offset -= 4;
            }

          code = recog (restore_all, NULL_RTX, NULL);

          if (code >= 0)
            {
              rtx insn;

              /* Release any stack the helper will not release itself,
                 before emitting the combined restore-and-return.  */
              actual_fsize -= alloc_stack;
              if (actual_fsize)
                {
                  if (CONST_OK_FOR_K (actual_fsize))
                    emit_insn (gen_addsi3 (stack_pointer_rtx,
                                           stack_pointer_rtx,
                                           GEN_INT (actual_fsize)));
                  else
                    {
                      rtx reg = gen_rtx_REG (Pmode, 12);
                      emit_move_insn (reg, GEN_INT (actual_fsize));
                      emit_insn (gen_addsi3 (stack_pointer_rtx,
                                             stack_pointer_rtx,
                                             reg));
                    }
                }

              insn = emit_jump_insn (restore_all);
              INSN_CODE (insn) = code;

              if (TARGET_DEBUG)
                fprintf (stderr, "\
Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
                         restore_normal_len - restore_func_len,
                         restore_normal_len, restore_func_len,
                         IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
            }
          else
            restore_all = NULL_RTX;
        }
    }

  /* If no epilog save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
        init_stack_free = 4 * num_restore;
      else
        init_stack_free = actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if (actual_fsize > init_stack_free)
        {
          int diff;

          diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);

          if (CONST_OK_FOR_K (diff))
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (diff)));
          else
            {
              /* Adjustment does not fit an immediate; go through r12.  */
              rtx reg = gen_rtx_REG (Pmode, 12);
              emit_move_insn (reg, GEN_INT (diff));
              emit_insn (gen_addsi3 (stack_pointer_rtx,
                                     stack_pointer_rtx,
                                     reg));
            }
        }

      /* Special case interrupt functions that save all registers
         for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_restore_all_interrupt ());
          else
            emit_insn (gen_restore_all_interrupt ());
        }
      else
        {
          /* Restore registers from the beginning of the stack frame.  */
          offset = init_stack_free - 4;

          /* Restore the return pointer first.  */
          if (num_restore > 0
              && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
            {
              emit_move_insn (restore_regs[--num_restore],
                              gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)));
              offset -= 4;
            }

          for (i = 0; i < num_restore; i++)
            {
              emit_move_insn (restore_regs[i],
                              gen_rtx_MEM (SImode,
                                           plus_constant (stack_pointer_rtx,
                                                          offset)));

              /* Keep the restored value live until the return.  */
              emit_insn (gen_rtx_USE (VOIDmode, restore_regs[i]));
              offset -= 4;
            }

          /* Cut back the remainder of the stack.  */
          if (init_stack_free)
            emit_insn (gen_addsi3 (stack_pointer_rtx,
                                   stack_pointer_rtx,
                                   GEN_INT (init_stack_free)));
        }

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
        {
          if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
            emit_insn (gen_callt_return_interrupt ());
          else
            emit_jump_insn (gen_return_interrupt ());
        }
      else if (actual_fsize)
        emit_jump_insn (gen_return_internal ());
      else
        emit_jump_insn (gen_return ());
    }

  /* Invalidate the cached interrupt-function answer for the next
     function to be compiled.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2065
2066
2067 /* Update the condition code from the insn. */
2068
void
notice_update_cc (rtx body, rtx insn)
{
  /* Maintain the cc0-style condition-code status for INSN, based on the
     insn's "cc" attribute.  */
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.
         Drop any cached value that mentioned the clobbered operand.  */
      if (cc_status.value1 != 0
          && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
        cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
         V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
         C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2113
2114 /* Retrieve the data area that has been chosen for the given decl. */
2115
2116 v850_data_area
v850_get_data_area(tree decl)2117 v850_get_data_area (tree decl)
2118 {
2119 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2120 return DATA_AREA_SDA;
2121
2122 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2123 return DATA_AREA_TDA;
2124
2125 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2126 return DATA_AREA_ZDA;
2127
2128 return DATA_AREA_NORMAL;
2129 }
2130
2131 /* Store the indicated data area in the decl's attributes. */
2132
2133 static void
v850_set_data_area(tree decl,v850_data_area data_area)2134 v850_set_data_area (tree decl, v850_data_area data_area)
2135 {
2136 tree name;
2137
2138 switch (data_area)
2139 {
2140 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2141 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2142 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2143 default:
2144 return;
2145 }
2146
2147 DECL_ATTRIBUTES (decl) = tree_cons
2148 (name, NULL, DECL_ATTRIBUTES (decl));
2149 }
2150
/* Machine-specific attributes recognized by this back end; terminated
   by the all-NULL sentinel entry.  */
const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, true,  false, false, v850_handle_interrupt_attribute },
  { "interrupt",         0, 0, true,  false, false, v850_handle_interrupt_attribute },
  { "sda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { "tda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { "zda",               0, 0, true,  false, false, v850_handle_data_area_attribute },
  { NULL,                0, 0, false, false, false, NULL }
};
2161
2162 /* Handle an "interrupt" attribute; arguments as in
2163 struct attribute_spec.handler. */
2164 static tree
v850_handle_interrupt_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2165 v850_handle_interrupt_attribute (tree * node,
2166 tree name,
2167 tree args ATTRIBUTE_UNUSED,
2168 int flags ATTRIBUTE_UNUSED,
2169 bool * no_add_attrs)
2170 {
2171 if (TREE_CODE (*node) != FUNCTION_DECL)
2172 {
2173 warning ("`%s' attribute only applies to functions",
2174 IDENTIFIER_POINTER (name));
2175 *no_add_attrs = true;
2176 }
2177
2178 return NULL_TREE;
2179 }
2180
2181 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2182 struct attribute_spec.handler. */
/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  */
static tree
v850_handle_data_area_attribute (tree* node,
                                 tree name,
                                 tree args ATTRIBUTE_UNUSED,
                                 int flags ATTRIBUTE_UNUSED,
                                 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    abort ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* Data area attributes are meaningless on function-local
         variables; diagnose, then fall through to the conflict check
         shared with FUNCTION_DECL.  */
      if (current_function_decl != NULL_TREE)
        {
          error ("%Jdata area attributes cannot be specified for "
                 "local variables", decl, decl);
          *no_add_attrs = true;
        }

      /* Drop through.  */

    case FUNCTION_DECL:
      /* Reject a data area that conflicts with one set earlier.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
        {
          error ("%Jdata area of '%D' conflicts with previous declaration",
                 decl, decl);
          *no_add_attrs = true;
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2232
2233
2234 /* Return nonzero if FUNC is an interrupt function as specified
2235 by the "interrupt" attribute. */
2236
2237 int
v850_interrupt_function_p(tree func)2238 v850_interrupt_function_p (tree func)
2239 {
2240 tree a;
2241 int ret = 0;
2242
2243 if (v850_interrupt_cache_p)
2244 return v850_interrupt_p;
2245
2246 if (TREE_CODE (func) != FUNCTION_DECL)
2247 return 0;
2248
2249 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2250 if (a != NULL_TREE)
2251 ret = 1;
2252
2253 else
2254 {
2255 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2256 ret = a != NULL_TREE;
2257 }
2258
2259 /* Its not safe to trust global variables until after function inlining has
2260 been done. */
2261 if (reload_completed | reload_in_progress)
2262 v850_interrupt_p = ret;
2263
2264 return ret;
2265 }
2266
2267
/* Determine the data area for DECL (from its attributes, its explicit
   section name, or the -m{zda,sda,tda}=n size limits) and record the
   result as flags on the SYMBOL_REF SYMBOL.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
        {
          const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

          if (streq (name, ".zdata") || streq (name, ".zbss"))
            v850_set_data_area (decl, DATA_AREA_ZDA);

          else if (streq (name, ".sdata") || streq (name, ".sbss"))
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (streq (name, ".tdata"))
            v850_set_data_area (decl, DATA_AREA_TDA);
        }

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
        {
          /* Place the object in the smallest-limit area it fits in;
             non-positive sizes (incomplete types) are left alone.  */
          int size = int_size_in_bytes (TREE_TYPE (decl));
          if (size <= 0)
            ;

          else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
            v850_set_data_area (decl, DATA_AREA_TDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
            v850_set_data_area (decl, DATA_AREA_ZDA);
        }

      /* Still no data area: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
        return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: abort ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2321
2322 static void
v850_encode_section_info(tree decl,rtx rtl,int first)2323 v850_encode_section_info (tree decl, rtx rtl, int first)
2324 {
2325 default_encode_section_info (decl, rtl, first);
2326
2327 if (TREE_CODE (decl) == VAR_DECL
2328 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2329 v850_encode_data_area (decl, XEXP (rtl, 0));
2330 }
2331
2332 /* Return true if the given RTX is a register which can be restored
2333 by a function epilogue. */
2334 int
register_is_ok_for_epilogue(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2335 register_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2336 {
2337 /* The save/restore routines can only cope with registers 20 - 31. */
2338 return ((GET_CODE (op) == REG)
2339 && (((REGNO (op) >= 20) && REGNO (op) <= 31)));
2340 }
2341
2342 /* Return nonzero if the given RTX is suitable for collapsing into
2343 jump to a function epilogue. */
/* Return nonzero if the given RTX is suitable for collapsing into
   jump to a function epilogue.  OP is a PARALLEL whose element 0 is a
   return, element 1 the stack adjustment, and element 2 the first
   register pop (established by the md pattern); the remaining elements
   are verified here.  */
int
pattern_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the function epilogue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
          (mem:SI (plus:SI (reg:SI 3) (match_operand:SI n "immediate_operand" "i"))))
  */

  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
        return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Destination must be a restorable SImode register, source an
         SImode memory load.  */
      if (GET_CODE (dest) != REG
          || GET_MODE (dest) != SImode
          || ! register_is_ok_for_epilogue (dest, SImode)
          || GET_CODE (src) != MEM
          || GET_MODE (src) != SImode)
        return 0;

      plus = XEXP (src, 0);

      /* The address must be stack pointer plus a constant offset.  */
      if (GET_CODE (plus) != PLUS
          || GET_CODE (XEXP (plus, 0)) != REG
          || GET_MODE (XEXP (plus, 0)) != SImode
          || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
          || GET_CODE (XEXP (plus, 1)) != CONST_INT)
        return 0;
    }

  return 1;
}
2400
/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  Returns a pointer to a static buffer holding the assembler
   text, or NULL after reporting an error.  */
char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid epilogue vector has at least three elements: the return,
     the stack adjustment and one register pop.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d\n", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  Element 1 must be
     (set (reg sp) (plus (reg sp) (const_int))).  */
  if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
    abort ();
  if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
    abort ();

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes;
     those are the only amounts the __return_* library routines handle.  */
  if (stack_bytes != 0 && stack_bytes != 16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	abort ();
      if (GET_CODE (SET_DEST (vector_element)) != REG)
	abort ();
      if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
	abort ();

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  if (first >= 32)
    abort ();

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      /* The extra 16 bytes are only released when the link pointer
	 is among the popped registers.  */
      if (stack_bytes != 16)
	abort ();

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      if (stack_bytes != 0)
	abort ();

      if ((mask & (1 << 29)) == 0)
	abort ();

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* The library routine may be out of range of a direct JR, so load
	 its address into r6 and jump indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2516
2517
/* Return nonzero if the given RTX is suitable for collapsing into
   a jump to a function prologue.  */
int
pattern_is_ok_for_prologue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;
  rtx vector_element;

  /* If there are no registers to save then the function prologue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes, except for the last entry which should be a CLOBBER of r10.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
       (match_operand:SI 2 "immediate_operand" "i")))
       (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  /* Element 0 is the stack adjustment, element 1 the first push.  The
     trailing one element (two with -mlong-calls) must be clobbers.  */
  for (i = 2; i < count - (TARGET_LONG_CALLS ? 2: 1); i++)
    {
      rtx dest;
      rtx src;
      rtx plus;

      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must store a word-mode register to the stack.  */
      if (GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode))
	return 0;

      plus = XEXP (dest, 0);

      if ( GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.	*/
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	{
	  return 0;
	}
    }

  /* Make sure that the last entries in the vector are clobbers
     of r10 (and also of r11 when -mlong-calls is in effect).  */
  for (; i < count; i++)
    {
      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != CLOBBER
	  || GET_CODE (XEXP (vector_element, 0)) != REG
	  || !(REGNO (XEXP (vector_element, 0)) == 10
	       || (TARGET_LONG_CALLS ? (REGNO (XEXP (vector_element, 0)) == 11) : 0 )))
	return 0;
    }

  return 1;
}
2600
/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  Returns a pointer to a static buffer holding
   the assembler text, or NULL after reporting an error.  */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  /* A valid prologue vector has at least the stack adjustment, one
     register push and the trailing clobber(s).  */
  if (count <= 2)
    {
      error ("bogus JARL construction: %d\n", count);
      return NULL;
    }

  /* Paranoia: element 0 must be
     (set (reg sp) (plus (reg sp) (const_int))).  */
  if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
    abort ();
  if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) != REG)
    abort ();
  if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
    abort ();

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  The vector also
     contains the stack adjustment and one clobber (two clobbers with
     -mlong-calls), which do not correspond to pushes.  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount we are popping either 0 or 16 bytes;
     those are the only amounts the __save_* library routines handle.  */
  if (stack_bytes != 0 && stack_bytes != -16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	abort ();
      if (GET_CODE (SET_SRC (vector_element)) != REG)
	abort ();
      if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
	abort ();

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  if (first >= 32)
    abort ();

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      /* The extra 16 bytes are only acquired when the link pointer
	 is among the saved registers.  */
      if (stack_bytes != -16)
	abort ();

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      if (stack_bytes != 0)
	abort ();
      if ((mask & (1 << 29)) == 0)
	abort ();

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* The library routine may be out of range of a direct JARL, so
	 compute the return address by hand (jarl .+4 / add 4) and jump
	 to the routine's address in r11.  */
      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2719
2720 extern tree last_assemble_variable_decl;
2721 extern int size_directive_output;
2722
2723 /* A version of asm_output_aligned_bss() that copes with the special
2724 data areas of the v850. */
2725 void
v850_output_aligned_bss(FILE * file,tree decl,const char * name,int size,int align)2726 v850_output_aligned_bss (FILE * file,
2727 tree decl,
2728 const char * name,
2729 int size,
2730 int align)
2731 {
2732 switch (v850_get_data_area (decl))
2733 {
2734 case DATA_AREA_ZDA:
2735 zbss_section ();
2736 break;
2737
2738 case DATA_AREA_SDA:
2739 sbss_section ();
2740 break;
2741
2742 case DATA_AREA_TDA:
2743 tdata_section ();
2744
2745 default:
2746 bss_section ();
2747 break;
2748 }
2749
2750 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2751 #ifdef ASM_DECLARE_OBJECT_NAME
2752 last_assemble_variable_decl = decl;
2753 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2754 #else
2755 /* Standard thing is just output label for the object. */
2756 ASM_OUTPUT_LABEL (file, name);
2757 #endif /* ASM_DECLARE_OBJECT_NAME */
2758 ASM_OUTPUT_SKIP (file, size ? size : 1);
2759 }
2760
2761 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2762 void
v850_output_common(FILE * file,tree decl,const char * name,int size,int align)2763 v850_output_common (FILE * file,
2764 tree decl,
2765 const char * name,
2766 int size,
2767 int align)
2768 {
2769 if (decl == NULL_TREE)
2770 {
2771 fprintf (file, "%s", COMMON_ASM_OP);
2772 }
2773 else
2774 {
2775 switch (v850_get_data_area (decl))
2776 {
2777 case DATA_AREA_ZDA:
2778 fprintf (file, "%s", ZCOMMON_ASM_OP);
2779 break;
2780
2781 case DATA_AREA_SDA:
2782 fprintf (file, "%s", SCOMMON_ASM_OP);
2783 break;
2784
2785 case DATA_AREA_TDA:
2786 fprintf (file, "%s", TCOMMON_ASM_OP);
2787 break;
2788
2789 default:
2790 fprintf (file, "%s", COMMON_ASM_OP);
2791 break;
2792 }
2793 }
2794
2795 assemble_name (file, name);
2796 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2797 }
2798
2799 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2800 void
v850_output_local(FILE * file,tree decl,const char * name,int size,int align)2801 v850_output_local (FILE * file,
2802 tree decl,
2803 const char * name,
2804 int size,
2805 int align)
2806 {
2807 fprintf (file, "%s", LOCAL_ASM_OP);
2808 assemble_name (file, name);
2809 fprintf (file, "\n");
2810
2811 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2812 }
2813
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  Also attaches an
   explicit section name to file-scope declarations whose GHS section
   kind has a default or renamed section.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the data area from the innermost active pragma, but only to
     file-scope variables/constants that do not already have one.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* Only consider file-scope decls that will get storage here and have
     no explicit section name already.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      abort ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2911
2912 /* Return nonzero if the given RTX is suitable
2913 for collapsing into a DISPOSE instruction. */
2914
2915 int
pattern_is_ok_for_dispose(rtx op,enum machine_mode mode ATTRIBUTE_UNUSED)2916 pattern_is_ok_for_dispose (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2917 {
2918 int count = XVECLEN (op, 0);
2919 int i;
2920
2921 /* If there are no registers to restore then
2922 the dispose instruction is not suitable. */
2923 if (count <= 2)
2924 return 0;
2925
2926 /* The pattern matching has already established that we are performing a
2927 function epilogue and that we are popping at least one register. We must
2928 now check the remaining entries in the vector to make sure that they are
2929 also register pops. There is no good reason why there should ever be
2930 anything else in this vector, but being paranoid always helps...
2931
2932 The test below performs the C equivalent of this machine description
2933 pattern match:
2934
2935 (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
2936 (mem:SI (plus:SI (reg:SI 3)
2937 (match_operand:SI n "immediate_operand" "i"))))
2938 */
2939
2940 for (i = 3; i < count; i++)
2941 {
2942 rtx vector_element = XVECEXP (op, 0, i);
2943 rtx dest;
2944 rtx src;
2945 rtx plus;
2946
2947 if (GET_CODE (vector_element) != SET)
2948 return 0;
2949
2950 dest = SET_DEST (vector_element);
2951 src = SET_SRC (vector_element);
2952
2953 if ( GET_CODE (dest) != REG
2954 || GET_MODE (dest) != SImode
2955 || ! register_is_ok_for_epilogue (dest, SImode)
2956 || GET_CODE (src) != MEM
2957 || GET_MODE (src) != SImode)
2958 return 0;
2959
2960 plus = XEXP (src, 0);
2961
2962 if ( GET_CODE (plus) != PLUS
2963 || GET_CODE (XEXP (plus, 0)) != REG
2964 || GET_MODE (XEXP (plus, 0)) != SImode
2965 || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
2966 || GET_CODE (XEXP (plus, 1)) != CONST_INT)
2967 return 0;
2968 }
2969
2970 return 1;
2971 }
2972
2973 /* Construct a DISPOSE instruction that is the equivalent of
2974 the given RTX. We have already verified that this should
2975 be possible. */
2976
2977 char *
construct_dispose_instruction(rtx op)2978 construct_dispose_instruction (rtx op)
2979 {
2980 int count = XVECLEN (op, 0);
2981 int stack_bytes;
2982 unsigned long int mask;
2983 int i;
2984 static char buff[ 100 ]; /* XXX */
2985 int use_callt = 0;
2986
2987 if (count <= 2)
2988 {
2989 error ("Bogus DISPOSE construction: %d\n", count);
2990 return NULL;
2991 }
2992
2993 /* Work out how many bytes to pop off the
2994 stack before retrieving registers. */
2995 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
2996 abort ();
2997 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
2998 abort ();
2999 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
3000 abort ();
3001
3002 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
3003
3004 /* Each pop will remove 4 bytes from the stack.... */
3005 stack_bytes -= (count - 2) * 4;
3006
3007 /* Make sure that the amount we are popping
3008 will fit into the DISPOSE instruction. */
3009 if (stack_bytes > 128)
3010 {
3011 error ("Too much stack space to dispose of: %d", stack_bytes);
3012 return NULL;
3013 }
3014
3015 /* Now compute the bit mask of registers to push. */
3016 mask = 0;
3017
3018 for (i = 2; i < count; i++)
3019 {
3020 rtx vector_element = XVECEXP (op, 0, i);
3021
3022 if (GET_CODE (vector_element) != SET)
3023 abort ();
3024 if (GET_CODE (SET_DEST (vector_element)) != REG)
3025 abort ();
3026 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
3027 abort ();
3028
3029 if (REGNO (SET_DEST (vector_element)) == 2)
3030 use_callt = 1;
3031 else
3032 mask |= 1 << REGNO (SET_DEST (vector_element));
3033 }
3034
3035 if (! TARGET_DISABLE_CALLT
3036 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
3037 {
3038 if (use_callt)
3039 {
3040 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
3041 return buff;
3042 }
3043 else
3044 {
3045 for (i = 20; i < 32; i++)
3046 if (mask & (1 << i))
3047 break;
3048
3049 if (i == 31)
3050 sprintf (buff, "callt ctoff(__callt_return_r31c)");
3051 else
3052 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
3053 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3054 }
3055 }
3056 else
3057 {
3058 static char regs [100]; /* XXX */
3059 int done_one;
3060
3061 /* Generate the DISPOSE instruction. Note we could just issue the
3062 bit mask as a number as the assembler can cope with this, but for
3063 the sake of our readers we turn it into a textual description. */
3064 regs[0] = 0;
3065 done_one = 0;
3066
3067 for (i = 20; i < 32; i++)
3068 {
3069 if (mask & (1 << i))
3070 {
3071 int first;
3072
3073 if (done_one)
3074 strcat (regs, ", ");
3075 else
3076 done_one = 1;
3077
3078 first = i;
3079 strcat (regs, reg_names[ first ]);
3080
3081 for (i++; i < 32; i++)
3082 if ((mask & (1 << i)) == 0)
3083 break;
3084
3085 if (i > first + 1)
3086 {
3087 strcat (regs, " - ");
3088 strcat (regs, reg_names[ i - 1 ] );
3089 }
3090 }
3091 }
3092
3093 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
3094 }
3095
3096 return buff;
3097 }
3098
/* Return nonzero if the given RTX is suitable
   for collapsing into a PREPARE instruction.  */

int
pattern_is_ok_for_prepare (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the prepare instruction
     is not suitable.  */
  if (count <= 1)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
       (match_operand:SI 2 "immediate_operand" "i")))
       (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  /* Element 0 is the stack adjustment, element 1 the first push.  */
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each element must store a word-mode register to the stack.  */
      if ( GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode)
	   )
	return 0;

      plus = XEXP (dest, 0);

      if ( GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.	*/
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	return 0;
    }

  return 1;
}
3167
3168 /* Construct a PREPARE instruction that is the equivalent of
3169 the given RTL. We have already verified that this should
3170 be possible. */
3171
3172 char *
construct_prepare_instruction(rtx op)3173 construct_prepare_instruction (rtx op)
3174 {
3175 int count = XVECLEN (op, 0);
3176 int stack_bytes;
3177 unsigned long int mask;
3178 int i;
3179 static char buff[ 100 ]; /* XXX */
3180 int use_callt = 0;
3181
3182 if (count <= 1)
3183 {
3184 error ("Bogus PREPEARE construction: %d\n", count);
3185 return NULL;
3186 }
3187
3188 /* Work out how many bytes to push onto
3189 the stack after storing the registers. */
3190 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3191 abort ();
3192 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
3193 abort ();
3194 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
3195 abort ();
3196
3197 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
3198
3199 /* Each push will put 4 bytes from the stack. */
3200 stack_bytes += (count - 1) * 4;
3201
3202 /* Make sure that the amount we are popping
3203 will fit into the DISPOSE instruction. */
3204 if (stack_bytes < -128)
3205 {
3206 error ("Too much stack space to prepare: %d", stack_bytes);
3207 return NULL;
3208 }
3209
3210 /* Now compute the bit mask of registers to push. */
3211 mask = 0;
3212 for (i = 1; i < count; i++)
3213 {
3214 rtx vector_element = XVECEXP (op, 0, i);
3215
3216 if (GET_CODE (vector_element) != SET)
3217 abort ();
3218 if (GET_CODE (SET_SRC (vector_element)) != REG)
3219 abort ();
3220 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
3221 abort ();
3222
3223 if (REGNO (SET_SRC (vector_element)) == 2)
3224 use_callt = 1;
3225 else
3226 mask |= 1 << REGNO (SET_SRC (vector_element));
3227 }
3228
3229 if ((! TARGET_DISABLE_CALLT)
3230 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
3231 {
3232 if (use_callt)
3233 {
3234 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
3235 return buff;
3236 }
3237
3238 for (i = 20; i < 32; i++)
3239 if (mask & (1 << i))
3240 break;
3241
3242 if (i == 31)
3243 sprintf (buff, "callt ctoff(__callt_save_r31c)");
3244 else
3245 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
3246 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3247 }
3248 else
3249 {
3250 static char regs [100]; /* XXX */
3251 int done_one;
3252
3253
3254 /* Generate the PREPARE instruction. Note we could just issue the
3255 bit mask as a number as the assembler can cope with this, but for
3256 the sake of our readers we turn it into a textual description. */
3257 regs[0] = 0;
3258 done_one = 0;
3259
3260 for (i = 20; i < 32; i++)
3261 {
3262 if (mask & (1 << i))
3263 {
3264 int first;
3265
3266 if (done_one)
3267 strcat (regs, ", ");
3268 else
3269 done_one = 1;
3270
3271 first = i;
3272 strcat (regs, reg_names[ first ]);
3273
3274 for (i++; i < 32; i++)
3275 if ((mask & (1 << i)) == 0)
3276 break;
3277
3278 if (i > first + 1)
3279 {
3280 strcat (regs, " - ");
3281 strcat (regs, reg_names[ i - 1 ] );
3282 }
3283 }
3284 }
3285
3286 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
3287 }
3288
3289 return buff;
3290 }
3291
/* Implement `va_arg'.  Returns an RTX for the address of the next
   argument in VALIST, advancing VALIST past it as a side effect.  */

rtx
v850_va_arg (tree valist, tree type)
{
  HOST_WIDE_INT size, rsize;
  tree addr, incr;
  rtx addr_rtx;
  int indirect;

  /* Round up sizeof(type) to a word.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
  indirect = 0;

  /* Arguments bigger than 8 bytes occupy a single word-sized slot
     that holds a pointer to the value rather than the value itself.  */
  if (size > 8)
    {
      size = rsize = UNITS_PER_WORD;
      indirect = 1;
    }

  /* Advance the va_list pointer past this argument ...  */
  addr = save_expr (valist);
  incr = fold (build (PLUS_EXPR, ptr_type_node, addr,
		      build_int_2 (rsize, 0)));

  incr = build (MODIFY_EXPR, ptr_type_node, valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* ... and return the (pre-increment) slot address.  */
  addr_rtx = expand_expr (addr, NULL, Pmode, EXPAND_NORMAL);

  if (indirect)
    {
      /* For by-reference arguments, load the pointer stored in the
	 slot and return a MEM through it.  */
      addr_rtx = force_reg (Pmode, addr_rtx);
      addr_rtx = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (addr_rtx, get_varargs_alias_set ());
    }

  return addr_rtx;
}
3332
3333 /* Return an RTX indicating where the return address to the
3334 calling function can be found. */
3335
3336 rtx
v850_return_addr(int count)3337 v850_return_addr (int count)
3338 {
3339 if (count != 0)
3340 return const0_rtx;
3341
3342 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
3343 }
3344
3345 static void
v850_select_section(tree exp,int reloc ATTRIBUTE_UNUSED,unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)3346 v850_select_section (tree exp,
3347 int reloc ATTRIBUTE_UNUSED,
3348 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3349 {
3350 if (TREE_CODE (exp) == VAR_DECL)
3351 {
3352 int is_const;
3353 if (!TREE_READONLY (exp)
3354 || TREE_SIDE_EFFECTS (exp)
3355 || !DECL_INITIAL (exp)
3356 || (DECL_INITIAL (exp) != error_mark_node
3357 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3358 is_const = FALSE;
3359 else
3360 is_const = TRUE;
3361
3362 switch (v850_get_data_area (exp))
3363 {
3364 case DATA_AREA_ZDA:
3365 if (is_const)
3366 rozdata_section ();
3367 else
3368 zdata_section ();
3369 break;
3370
3371 case DATA_AREA_TDA:
3372 tdata_section ();
3373 break;
3374
3375 case DATA_AREA_SDA:
3376 if (is_const)
3377 rosdata_section ();
3378 else
3379 sdata_section ();
3380 break;
3381
3382 default:
3383 if (is_const)
3384 readonly_data_section ();
3385 else
3386 data_section ();
3387 break;
3388 }
3389 }
3390 else if (TREE_CODE (exp) == STRING_CST)
3391 {
3392 if (! flag_writable_strings)
3393 readonly_data_section ();
3394 else
3395 data_section ();
3396 }
3397 else
3398 readonly_data_section ();
3399 }
3400