1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "integrate.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
51 /* Function prototypes for stupid compilers: */
52 static bool v850_handle_option (size_t, const char *, int);
53 static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
54 static int const_costs_int (HOST_WIDE_INT, int);
55 static int const_costs (rtx, enum rtx_code);
56 static bool v850_rtx_costs (rtx, int, int, int *);
57 static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
58 static void v850_reorg (void);
59 static int ep_memory_offset (enum machine_mode, int);
60 static void v850_set_data_area (tree, v850_data_area);
61 const struct attribute_spec v850_attribute_table[];
62 static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
63 static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
64 static void v850_insert_attributes (tree, tree *);
65 static void v850_asm_init_sections (void);
66 static section *v850_select_section (tree, int, unsigned HOST_WIDE_INT);
67 static void v850_encode_data_area (tree, rtx);
68 static void v850_encode_section_info (tree, rtx, int);
69 static bool v850_return_in_memory (tree, tree);
70 static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
71 tree, int *, int);
72 static bool v850_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
73 tree, bool);
74 static int v850_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
75 tree, bool);
76
77 /* Information about the various small memory areas. */
78 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
79 {
80 /* name max physical max */
81 { "tda", 0, 256 },
82 { "sda", 0, 65536 },
83 { "zda", 0, 32768 },
84 };
85
86 /* Names of the various data areas used on the v850. */
87 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
88 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
89
90 /* Track the current data area set by the data area pragma (which
91 can be nested). Tested by check_default_data_area. */
92 data_area_stack_element * data_area_stack = NULL;
93
94 /* True if we don't need to check any more if the current
95 function is an interrupt handler. */
96 static int v850_interrupt_cache_p = FALSE;
97
98 /* Whether current function is an interrupt handler. */
99 static int v850_interrupt_p = FALSE;
100
101 static GTY(()) section *rosdata_section;
102 static GTY(()) section *rozdata_section;
103 static GTY(()) section *tdata_section;
104 static GTY(()) section *zdata_section;
105 static GTY(()) section *zbss_section;
106
107 /* Initialize the GCC target structure. */
108 #undef TARGET_ASM_ALIGNED_HI_OP
109 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
110
111 #undef TARGET_ATTRIBUTE_TABLE
112 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
113
114 #undef TARGET_INSERT_ATTRIBUTES
115 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
116
117 #undef TARGET_ASM_SELECT_SECTION
118 #define TARGET_ASM_SELECT_SECTION v850_select_section
119
120 /* The assembler supports switchable .bss sections, but
121 v850_select_section doesn't yet make use of them. */
122 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
123 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
124
125 #undef TARGET_ENCODE_SECTION_INFO
126 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
127
128 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
129 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
130
131 #undef TARGET_DEFAULT_TARGET_FLAGS
132 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
133 #undef TARGET_HANDLE_OPTION
134 #define TARGET_HANDLE_OPTION v850_handle_option
135
136 #undef TARGET_RTX_COSTS
137 #define TARGET_RTX_COSTS v850_rtx_costs
138
139 #undef TARGET_ADDRESS_COST
140 #define TARGET_ADDRESS_COST hook_int_rtx_0
141
142 #undef TARGET_MACHINE_DEPENDENT_REORG
143 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
144
145 #undef TARGET_PROMOTE_PROTOTYPES
146 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
147
148 #undef TARGET_RETURN_IN_MEMORY
149 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
150
151 #undef TARGET_PASS_BY_REFERENCE
152 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
153
154 #undef TARGET_CALLEE_COPIES
155 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
156
157 #undef TARGET_SETUP_INCOMING_VARARGS
158 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
159
160 #undef TARGET_ARG_PARTIAL_BYTES
161 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
162
163 struct gcc_target targetm = TARGET_INITIALIZER;
164
165 /* Set the maximum size of small memory area TYPE to the value given
166 by VALUE. Return true if VALUE was syntactically correct. VALUE
167 starts with the argument separator: either "-" or "=". */
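/* For example, a command line option such as "-mtda=256" would arrive here as
   TYPE == SMALL_MEMORY_TDA with VALUE == "=256" and set the tda limit to 256
   bytes (illustrative values; see v850_handle_option below for the dispatch
   from the option codes).  */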
168
169 static bool
170 v850_handle_memory_option (enum small_memory_type type, const char *value)
171 {
172 int i, size;
173
174 if (*value != '-' && *value != '=')
175 return false;
176
177 value++;
178 for (i = 0; value[i]; i++)
179 if (!ISDIGIT (value[i]))
180 return false;
181
182 size = atoi (value);
183 if (size > small_memory[type].physical_max)
184 error ("value passed to %<-m%s%> is too large", small_memory[type].name);
185 else
186 small_memory[type].max = size;
187 return true;
188 }
189
190 /* Implement TARGET_HANDLE_OPTION. */
191
192 static bool
193 v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
194 {
195 switch (code)
196 {
197 case OPT_mspace:
198 target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
199 return true;
200
201 case OPT_mv850:
202 target_flags &= ~(MASK_CPU ^ MASK_V850);
203 return true;
204
205 case OPT_mv850e:
206 case OPT_mv850e1:
207 target_flags &= ~(MASK_CPU ^ MASK_V850E);
208 return true;
209
210 case OPT_mtda:
211 return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);
212
213 case OPT_msda:
214 return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);
215
216 case OPT_mzda:
217 return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);
218
219 default:
220 return true;
221 }
222 }
223
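/* Implement TARGET_PASS_BY_REFERENCE.  Arguments larger than 8 bytes (for
   example a 12 byte structure) are passed by invisible reference; anything
   of 8 bytes or less is passed by value.  */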
224 static bool
225 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
226 enum machine_mode mode, tree type,
227 bool named ATTRIBUTE_UNUSED)
228 {
229 unsigned HOST_WIDE_INT size;
230
231 if (type)
232 size = int_size_in_bytes (type);
233 else
234 size = GET_MODE_SIZE (mode);
235
236 return size > 8;
237 }
238
239 /* Return an RTX to represent where an argument with mode MODE will be passed
240 to a function. If the result is 0, the argument is pushed on the stack. */
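/* For example, in a call f (int a, int b) the first argument is passed in r6
   and the second in r7; once the four argument words (r6-r9) are exhausted,
   the remaining arguments are pushed on the stack.  */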
241
242 rtx
243 function_arg (CUMULATIVE_ARGS * cum,
244 enum machine_mode mode,
245 tree type,
246 int named)
247 {
248 rtx result = 0;
249 int size, align;
250
251 if (TARGET_GHS && !named)
252 return NULL_RTX;
253
254 if (mode == BLKmode)
255 size = int_size_in_bytes (type);
256 else
257 size = GET_MODE_SIZE (mode);
258
259 if (size < 1)
260 return 0;
261
262 if (type)
263 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
264 else
265 align = size;
266
267 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
268
269 if (cum->nbytes > 4 * UNITS_PER_WORD)
270 return 0;
271
272 if (type == NULL_TREE
273 && cum->nbytes + size > 4 * UNITS_PER_WORD)
274 return 0;
275
276 switch (cum->nbytes / UNITS_PER_WORD)
277 {
278 case 0:
279 result = gen_rtx_REG (mode, 6);
280 break;
281 case 1:
282 result = gen_rtx_REG (mode, 7);
283 break;
284 case 2:
285 result = gen_rtx_REG (mode, 8);
286 break;
287 case 3:
288 result = gen_rtx_REG (mode, 9);
289 break;
290 default:
291 result = 0;
292 }
293
294 return result;
295 }
296
297
298 /* Return the number of bytes which must be put into registers
299 for values which are passed partly in registers and partly in memory. */
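/* For example, an 8 byte argument whose first word lands in r9 has 4 bytes
   in a register and 4 bytes in memory, so 4 is returned.  */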
300
301 static int
302 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
303 tree type, bool named)
304 {
305 int size, align;
306
307 if (TARGET_GHS && !named)
308 return 0;
309
310 if (mode == BLKmode)
311 size = int_size_in_bytes (type);
312 else
313 size = GET_MODE_SIZE (mode);
314
315 if (type)
316 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
317 else
318 align = size;
319
320 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
321
322 if (cum->nbytes > 4 * UNITS_PER_WORD)
323 return 0;
324
325 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
326 return 0;
327
328 if (type == NULL_TREE
329 && cum->nbytes + size > 4 * UNITS_PER_WORD)
330 return 0;
331
332 return 4 * UNITS_PER_WORD - cum->nbytes;
333 }
334
335
336 /* Return the high and low words of a CONST_DOUBLE */
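/* For an SFmode constant the single precision image is placed in *P_HIGH and
   *P_LOW is set to zero; e.g. 1.0f comes back as *P_HIGH == 0x3f800000.  */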
337
338 static void
339 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
340 {
341 if (GET_CODE (x) == CONST_DOUBLE)
342 {
343 long t[2];
344 REAL_VALUE_TYPE rv;
345
346 switch (GET_MODE (x))
347 {
348 case DFmode:
349 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
350 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
351 *p_high = t[1]; /* since v850 is little endian */
352 *p_low = t[0]; /* high is second word */
353 return;
354
355 case SFmode:
356 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
357 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
358 *p_low = 0;
359 return;
360
361 case VOIDmode:
362 case DImode:
363 *p_high = CONST_DOUBLE_HIGH (x);
364 *p_low = CONST_DOUBLE_LOW (x);
365 return;
366
367 default:
368 break;
369 }
370 }
371
372 fatal_insn ("const_double_split got a bad insn:", x);
373 }
374
375
376 /* Return the cost of the rtx R with code CODE. */
377
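/* Return the cost of loading the integer constant VALUE into a register:
   constants matching the 'I' constraint cost ZERO_COST, signed 5 bit
   immediates ('J') cost 1, signed 16 bit immediates ('K') cost 2, and
   anything else costs 4.  */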
378 static int
379 const_costs_int (HOST_WIDE_INT value, int zero_cost)
380 {
381 if (CONST_OK_FOR_I (value))
382 return zero_cost;
383 else if (CONST_OK_FOR_J (value))
384 return 1;
385 else if (CONST_OK_FOR_K (value))
386 return 2;
387 else
388 return 4;
389 }
390
391 static int
392 const_costs (rtx r, enum rtx_code c)
393 {
394 HOST_WIDE_INT high, low;
395
396 switch (c)
397 {
398 case CONST_INT:
399 return const_costs_int (INTVAL (r), 0);
400
401 case CONST_DOUBLE:
402 const_double_split (r, &high, &low);
403 if (GET_MODE (r) == SFmode)
404 return const_costs_int (high, 1);
405 else
406 return const_costs_int (high, 1) + const_costs_int (low, 1);
407
408 case SYMBOL_REF:
409 case LABEL_REF:
410 case CONST:
411 return 2;
412
413 case HIGH:
414 return 1;
415
416 default:
417 return 4;
418 }
419 }
420
421 static bool
422 v850_rtx_costs (rtx x,
423 int code,
424 int outer_code ATTRIBUTE_UNUSED,
425 int * total)
426 {
427 switch (code)
428 {
429 case CONST_INT:
430 case CONST_DOUBLE:
431 case CONST:
432 case SYMBOL_REF:
433 case LABEL_REF:
434 *total = COSTS_N_INSNS (const_costs (x, code));
435 return true;
436
437 case MOD:
438 case DIV:
439 case UMOD:
440 case UDIV:
441 if (TARGET_V850E && optimize_size)
442 *total = 6;
443 else
444 *total = 60;
445 return true;
446
447 case MULT:
448 if (TARGET_V850E
449 && ( GET_MODE (x) == SImode
450 || GET_MODE (x) == HImode
451 || GET_MODE (x) == QImode))
452 {
453 if (GET_CODE (XEXP (x, 1)) == REG)
454 *total = 4;
455 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
456 {
457 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
458 *total = 6;
459 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
460 *total = 10;
461 }
462 }
463 else
464 *total = 20;
465 return true;
466
467 default:
468 return false;
469 }
470 }
471
472 /* Print operand X using operand code CODE to assembly language output file
473 FILE. */
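/* These codes appear in the output templates elsewhere in this file; for
   example output_move_single uses "%S1ld%W1 %1,%0", where %S1 prepends "s"
   for a short ep-relative load, %W1 selects the .b/.h/.w suffix from the
   operand's mode, and %1 prints the operand itself.  */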
474
475 void
476 print_operand (FILE * file, rtx x, int code)
477 {
478 HOST_WIDE_INT high, low;
479
480 switch (code)
481 {
482 case 'c':
483 /* We use 'c' operands with symbols for .vtinherit */
484 if (GET_CODE (x) == SYMBOL_REF)
485 {
486 output_addr_const(file, x);
487 break;
488 }
489 /* fall through */
490 case 'b':
491 case 'B':
492 case 'C':
493 switch ((code == 'B' || code == 'C')
494 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
495 {
496 case NE:
497 if (code == 'c' || code == 'C')
498 fprintf (file, "nz");
499 else
500 fprintf (file, "ne");
501 break;
502 case EQ:
503 if (code == 'c' || code == 'C')
504 fprintf (file, "z");
505 else
506 fprintf (file, "e");
507 break;
508 case GE:
509 fprintf (file, "ge");
510 break;
511 case GT:
512 fprintf (file, "gt");
513 break;
514 case LE:
515 fprintf (file, "le");
516 break;
517 case LT:
518 fprintf (file, "lt");
519 break;
520 case GEU:
521 fprintf (file, "nl");
522 break;
523 case GTU:
524 fprintf (file, "h");
525 break;
526 case LEU:
527 fprintf (file, "nh");
528 break;
529 case LTU:
530 fprintf (file, "l");
531 break;
532 default:
533 gcc_unreachable ();
534 }
535 break;
536 case 'F': /* high word of CONST_DOUBLE */
537 switch (GET_CODE (x))
538 {
539 case CONST_INT:
540 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
541 break;
542
543 case CONST_DOUBLE:
544 const_double_split (x, &high, &low);
545 fprintf (file, "%ld", (long) high);
546 break;
547
548 default:
549 gcc_unreachable ();
550 }
551 break;
552 case 'G': /* low word of CONST_DOUBLE */
553 switch (GET_CODE (x))
554 {
555 case CONST_INT:
556 fprintf (file, "%ld", (long) INTVAL (x));
557 break;
558
559 case CONST_DOUBLE:
560 const_double_split (x, &high, &low);
561 fprintf (file, "%ld", (long) low);
562 break;
563
564 default:
565 gcc_unreachable ();
566 }
567 break;
568 case 'L':
569 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
570 break;
571 case 'M':
572 fprintf (file, "%d", exact_log2 (INTVAL (x)));
573 break;
574 case 'O':
575 gcc_assert (special_symbolref_operand (x, VOIDmode));
576
577 if (GET_CODE (x) == CONST)
578 x = XEXP (XEXP (x, 0), 0);
579 else
580 gcc_assert (GET_CODE (x) == SYMBOL_REF);
581
582 if (SYMBOL_REF_ZDA_P (x))
583 fprintf (file, "zdaoff");
584 else if (SYMBOL_REF_SDA_P (x))
585 fprintf (file, "sdaoff");
586 else if (SYMBOL_REF_TDA_P (x))
587 fprintf (file, "tdaoff");
588 else
589 gcc_unreachable ();
590 break;
591 case 'P':
592 gcc_assert (special_symbolref_operand (x, VOIDmode));
593 output_addr_const (file, x);
594 break;
595 case 'Q':
596 gcc_assert (special_symbolref_operand (x, VOIDmode));
597
598 if (GET_CODE (x) == CONST)
599 x = XEXP (XEXP (x, 0), 0);
600 else
601 gcc_assert (GET_CODE (x) == SYMBOL_REF);
602
603 if (SYMBOL_REF_ZDA_P (x))
604 fprintf (file, "r0");
605 else if (SYMBOL_REF_SDA_P (x))
606 fprintf (file, "gp");
607 else if (SYMBOL_REF_TDA_P (x))
608 fprintf (file, "ep");
609 else
610 gcc_unreachable ();
611 break;
612 case 'R': /* 2nd word of a double. */
613 switch (GET_CODE (x))
614 {
615 case REG:
616 fputs (reg_names[REGNO (x) + 1], file);
617 break;
618 case MEM:
619 x = XEXP (adjust_address (x, SImode, 4), 0);
620 print_operand_address (file, x);
621 if (GET_CODE (x) == CONST_INT)
622 fprintf (file, "[r0]");
623 break;
624
625 default:
626 break;
627 }
628 break;
629 case 'S':
630 {
631 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
632 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
633 fputs ("s", file);
634
635 break;
636 }
637 case 'T':
638 {
639 /* Like an 'S' operand above, but for unsigned loads only. */
640 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
641 fputs ("s", file);
642
643 break;
644 }
645 case 'W': /* print the instruction suffix */
646 switch (GET_MODE (x))
647 {
648 default:
649 gcc_unreachable ();
650
651 case QImode: fputs (".b", file); break;
652 case HImode: fputs (".h", file); break;
653 case SImode: fputs (".w", file); break;
654 case SFmode: fputs (".w", file); break;
655 }
656 break;
657 case '.': /* register r0 */
658 fputs (reg_names[0], file);
659 break;
660 case 'z': /* reg or zero */
661 if (GET_CODE (x) == REG)
662 fputs (reg_names[REGNO (x)], file);
663 else
664 {
665 gcc_assert (x == const0_rtx);
666 fputs (reg_names[0], file);
667 }
668 break;
669 default:
670 switch (GET_CODE (x))
671 {
672 case MEM:
673 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
674 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
675 XEXP (x, 0)));
676 else
677 output_address (XEXP (x, 0));
678 break;
679
680 case REG:
681 fputs (reg_names[REGNO (x)], file);
682 break;
683 case SUBREG:
684 fputs (reg_names[subreg_regno (x)], file);
685 break;
686 case CONST_INT:
687 case SYMBOL_REF:
688 case CONST:
689 case LABEL_REF:
690 case CODE_LABEL:
691 print_operand_address (file, x);
692 break;
693 default:
694 gcc_unreachable ();
695 }
696 break;
697
698 }
699 }
700
701
702 /* Output assembly language output for the address ADDR to FILE. */
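/* For example, a plain register address prints as "0[r10]", a small data
   symbol as "sdaoff(sym)[gp]", and a tiny data symbol as "tdaoff(sym)[ep]"
   (register and symbol names are illustrative).  */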
703
704 void
705 print_operand_address (FILE * file, rtx addr)
706 {
707 switch (GET_CODE (addr))
708 {
709 case REG:
710 fprintf (file, "0[");
711 print_operand (file, addr, 0);
712 fprintf (file, "]");
713 break;
714 case LO_SUM:
715 if (GET_CODE (XEXP (addr, 0)) == REG)
716 {
717 /* reg,foo */
718 fprintf (file, "lo(");
719 print_operand (file, XEXP (addr, 1), 0);
720 fprintf (file, ")[");
721 print_operand (file, XEXP (addr, 0), 0);
722 fprintf (file, "]");
723 }
724 break;
725 case PLUS:
726 if (GET_CODE (XEXP (addr, 0)) == REG
727 || GET_CODE (XEXP (addr, 0)) == SUBREG)
728 {
729 /* reg,foo */
730 print_operand (file, XEXP (addr, 1), 0);
731 fprintf (file, "[");
732 print_operand (file, XEXP (addr, 0), 0);
733 fprintf (file, "]");
734 }
735 else
736 {
737 print_operand (file, XEXP (addr, 0), 0);
738 fprintf (file, "+");
739 print_operand (file, XEXP (addr, 1), 0);
740 }
741 break;
742 case SYMBOL_REF:
743 {
744 const char *off_name = NULL;
745 const char *reg_name = NULL;
746
747 if (SYMBOL_REF_ZDA_P (addr))
748 {
749 off_name = "zdaoff";
750 reg_name = "r0";
751 }
752 else if (SYMBOL_REF_SDA_P (addr))
753 {
754 off_name = "sdaoff";
755 reg_name = "gp";
756 }
757 else if (SYMBOL_REF_TDA_P (addr))
758 {
759 off_name = "tdaoff";
760 reg_name = "ep";
761 }
762
763 if (off_name)
764 fprintf (file, "%s(", off_name);
765 output_addr_const (file, addr);
766 if (reg_name)
767 fprintf (file, ")[%s]", reg_name);
768 }
769 break;
770 case CONST:
771 if (special_symbolref_operand (addr, VOIDmode))
772 {
773 rtx x = XEXP (XEXP (addr, 0), 0);
774 const char *off_name;
775 const char *reg_name;
776
777 if (SYMBOL_REF_ZDA_P (x))
778 {
779 off_name = "zdaoff";
780 reg_name = "r0";
781 }
782 else if (SYMBOL_REF_SDA_P (x))
783 {
784 off_name = "sdaoff";
785 reg_name = "gp";
786 }
787 else if (SYMBOL_REF_TDA_P (x))
788 {
789 off_name = "tdaoff";
790 reg_name = "ep";
791 }
792 else
793 gcc_unreachable ();
794
795 fprintf (file, "%s(", off_name);
796 output_addr_const (file, addr);
797 fprintf (file, ")[%s]", reg_name);
798 }
799 else
800 output_addr_const (file, addr);
801 break;
802 default:
803 output_addr_const (file, addr);
804 break;
805 }
806 }
807
808 /* When assemble_integer is used to emit the offsets for a switch
809 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
810 output_addr_const will normally barf at this, but it is OK to omit
811 the truncate and just emit the difference of the two labels. The
812 .hword directive will automatically handle the truncation for us.
813
814 Returns 1 if rtx was handled, 0 otherwise. */
815
816 int
817 v850_output_addr_const_extra (FILE * file, rtx x)
818 {
819 if (GET_CODE (x) != TRUNCATE)
820 return 0;
821
822 x = XEXP (x, 0);
823
824 /* We must also handle the case where the switch table was passed a
825 constant value and so has been collapsed. In this case the first
826 label will have been deleted. In such a case it is OK to emit
827 nothing, since the table will not be used.
828 (cf gcc.c-torture/compile/990801-1.c). */
829 if (GET_CODE (x) == MINUS
830 && GET_CODE (XEXP (x, 0)) == LABEL_REF
831 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
832 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
833 return 1;
834
835 output_addr_const (file, x);
836 return 1;
837 }
838
839 /* Return appropriate code to load up a 1, 2, or 4 byte integer or
840 floating point value. */
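/* For example, loading a constant that fits neither the 5 nor the 16 bit
   immediate range, such as 0x12345678, produces the two instruction sequence
   "movhi hi(...),r0,reg" / "movea lo(...),reg,reg" on the plain V850, while
   the V850E can use a single mov.  */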
841
842 const char *
843 output_move_single (rtx * operands)
844 {
845 rtx dst = operands[0];
846 rtx src = operands[1];
847
848 if (REG_P (dst))
849 {
850 if (REG_P (src))
851 return "mov %1,%0";
852
853 else if (GET_CODE (src) == CONST_INT)
854 {
855 HOST_WIDE_INT value = INTVAL (src);
856
857 if (CONST_OK_FOR_J (value)) /* Signed 5 bit immediate. */
858 return "mov %1,%0";
859
860 else if (CONST_OK_FOR_K (value)) /* Signed 16 bit immediate. */
861 return "movea lo(%1),%.,%0";
862
863 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
864 return "movhi hi(%1),%.,%0";
865
866 /* A random constant. */
867 else if (TARGET_V850E)
868 return "mov %1,%0";
869 else
870 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
871 }
872
873 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
874 {
875 HOST_WIDE_INT high, low;
876
877 const_double_split (src, &high, &low);
878
879 if (CONST_OK_FOR_J (high)) /* Signed 5 bit immediate. */
880 return "mov %F1,%0";
881
882 else if (CONST_OK_FOR_K (high)) /* Signed 16 bit immediate. */
883 return "movea lo(%F1),%.,%0";
884
885 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
886 return "movhi hi(%F1),%.,%0";
887
888 /* A random constant. */
889 else if (TARGET_V850E)
890 return "mov %F1,%0";
891
892 else
893 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
894 }
895
896 else if (GET_CODE (src) == MEM)
897 return "%S1ld%W1 %1,%0";
898
899 else if (special_symbolref_operand (src, VOIDmode))
900 return "movea %O1(%P1),%Q1,%0";
901
902 else if (GET_CODE (src) == LABEL_REF
903 || GET_CODE (src) == SYMBOL_REF
904 || GET_CODE (src) == CONST)
905 {
906 if (TARGET_V850E)
907 return "mov hilo(%1),%0";
908 else
909 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
910 }
911
912 else if (GET_CODE (src) == HIGH)
913 return "movhi hi(%1),%.,%0";
914
915 else if (GET_CODE (src) == LO_SUM)
916 {
917 operands[2] = XEXP (src, 0);
918 operands[3] = XEXP (src, 1);
919 return "movea lo(%3),%2,%0";
920 }
921 }
922
923 else if (GET_CODE (dst) == MEM)
924 {
925 if (REG_P (src))
926 return "%S0st%W0 %1,%0";
927
928 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
929 return "%S0st%W0 %.,%0";
930
931 else if (GET_CODE (src) == CONST_DOUBLE
932 && CONST0_RTX (GET_MODE (dst)) == src)
933 return "%S0st%W0 %.,%0";
934 }
935
936 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
937 return "";
938 }
939
940
941 /* Return appropriate code to load up an 8 byte integer or
942 floating point value. */
943
944 const char *
945 output_move_double (rtx * operands)
946 {
947 enum machine_mode mode = GET_MODE (operands[0]);
948 rtx dst = operands[0];
949 rtx src = operands[1];
950
951 if (register_operand (dst, mode)
952 && register_operand (src, mode))
953 {
954 if (REGNO (src) + 1 == REGNO (dst))
955 return "mov %R1,%R0\n\tmov %1,%0";
956 else
957 return "mov %1,%0\n\tmov %R1,%R0";
958 }
959
960 /* Storing 0 */
961 if (GET_CODE (dst) == MEM
962 && ((GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
963 || (GET_CODE (src) == CONST_DOUBLE && CONST_DOUBLE_OK_FOR_G (src))))
964 return "st.w %.,%0\n\tst.w %.,%R0";
965
966 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
967 {
968 HOST_WIDE_INT high_low[2];
969 int i;
970 rtx xop[10];
971
972 if (GET_CODE (src) == CONST_DOUBLE)
973 const_double_split (src, &high_low[1], &high_low[0]);
974 else
975 {
976 high_low[0] = INTVAL (src);
977 high_low[1] = (INTVAL (src) >= 0) ? 0 : -1;
978 }
979
980 for (i = 0; i < 2; i++)
981 {
982 xop[0] = gen_rtx_REG (SImode, REGNO (dst)+i);
983 xop[1] = GEN_INT (high_low[i]);
984 output_asm_insn (output_move_single (xop), xop);
985 }
986
987 return "";
988 }
989
990 if (GET_CODE (src) == MEM)
991 {
992 int ptrreg = -1;
993 int dreg = REGNO (dst);
994 rtx inside = XEXP (src, 0);
995
996 if (GET_CODE (inside) == REG)
997 ptrreg = REGNO (inside);
998 else if (GET_CODE (inside) == SUBREG)
999 ptrreg = subreg_regno (inside);
1000 else if (GET_CODE (inside) == PLUS)
1001 ptrreg = REGNO (XEXP (inside, 0));
1002 else if (GET_CODE (inside) == LO_SUM)
1003 ptrreg = REGNO (XEXP (inside, 0));
1004
1005 if (dreg == ptrreg)
1006 return "ld.w %R1,%R0\n\tld.w %1,%0";
1007 }
1008
1009 if (GET_CODE (src) == MEM)
1010 return "ld.w %1,%0\n\tld.w %R1,%R0";
1011
1012 if (GET_CODE (dst) == MEM)
1013 return "st.w %1,%0\n\tst.w %R1,%R0";
1014
1015 return "mov %1,%0\n\tmov %R1,%R0";
1016 }
1017
1018
1019 /* Return maximum offset supported for a short EP memory reference of mode
1020 MODE and signedness UNSIGNEDP. */
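/* For example, a QImode reference normally allows offsets up to 1 << 7
   (128 bytes), but only up to 1 << 4 when TARGET_SMALL_SLD is in effect.  */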
1021
1022 static int
1023 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1024 {
1025 int max_offset = 0;
1026
1027 switch (mode)
1028 {
1029 case QImode:
1030 if (TARGET_SMALL_SLD)
1031 max_offset = (1 << 4);
1032 else if (TARGET_V850E
1033 && ( ( unsignedp && ! TARGET_US_BIT_SET)
1034 || (! unsignedp && TARGET_US_BIT_SET)))
1035 max_offset = (1 << 4);
1036 else
1037 max_offset = (1 << 7);
1038 break;
1039
1040 case HImode:
1041 if (TARGET_SMALL_SLD)
1042 max_offset = (1 << 5);
1043 else if (TARGET_V850E
1044 && ( ( unsignedp && ! TARGET_US_BIT_SET)
1045 || (! unsignedp && TARGET_US_BIT_SET)))
1046 max_offset = (1 << 5);
1047 else
1048 max_offset = (1 << 8);
1049 break;
1050
1051 case SImode:
1052 case SFmode:
1053 max_offset = (1 << 8);
1054 break;
1055
1056 default:
1057 break;
1058 }
1059
1060 return max_offset;
1061 }
1062
1063 /* Return true if OP is a valid short EP memory reference */
1064
1065 int
1066 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1067 {
1068 rtx addr, op0, op1;
1069 int max_offset;
1070 int mask;
1071
1072 /* If we are not using the EP register on a per-function basis
1073 then do not allow this optimization at all. This is to
1074 prevent the use of the SLD/SST instructions which cannot be
1075 guaranteed to work properly due to a hardware bug. */
1076 if (!TARGET_EP)
1077 return FALSE;
1078
1079 if (GET_CODE (op) != MEM)
1080 return FALSE;
1081
1082 max_offset = ep_memory_offset (mode, unsigned_load);
1083
1084 mask = GET_MODE_SIZE (mode) - 1;
1085
1086 addr = XEXP (op, 0);
1087 if (GET_CODE (addr) == CONST)
1088 addr = XEXP (addr, 0);
1089
1090 switch (GET_CODE (addr))
1091 {
1092 default:
1093 break;
1094
1095 case SYMBOL_REF:
1096 return SYMBOL_REF_TDA_P (addr);
1097
1098 case REG:
1099 return REGNO (addr) == EP_REGNUM;
1100
1101 case PLUS:
1102 op0 = XEXP (addr, 0);
1103 op1 = XEXP (addr, 1);
1104 if (GET_CODE (op1) == CONST_INT
1105 && INTVAL (op1) < max_offset
1106 && INTVAL (op1) >= 0
1107 && (INTVAL (op1) & mask) == 0)
1108 {
1109 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1110 return TRUE;
1111
1112 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1113 return TRUE;
1114 }
1115 break;
1116 }
1117
1118 return FALSE;
1119 }
1120
1121 /* Substitute memory references involving a pointer, to use the ep pointer,
1122 taking care to save and restore the ep around the substituted sequence. */
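/* Roughly, a run of accesses through some register r12 such as
       ld.w 0[r12],r10 / ld.w 4[r12],r11
   becomes
       mov ep,r1 / mov r12,ep / sld.w 0[ep],r10 / sld.w 4[ep],r11 / mov r1,ep
   (register numbers are illustrative; the shorter sld/sst encodings are
   selected later via the %S operand code).  */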
1123
1124 static void
1125 substitute_ep_register (rtx first_insn,
1126 rtx last_insn,
1127 int uses,
1128 int regno,
1129 rtx * p_r1,
1130 rtx * p_ep)
1131 {
1132 rtx reg = gen_rtx_REG (Pmode, regno);
1133 rtx insn;
1134
1135 if (!*p_r1)
1136 {
1137 regs_ever_live[1] = 1;
1138 *p_r1 = gen_rtx_REG (Pmode, 1);
1139 *p_ep = gen_rtx_REG (Pmode, 30);
1140 }
1141
1142 if (TARGET_DEBUG)
1143 fprintf (stderr, "\
1144 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1145 2 * (uses - 3), uses, reg_names[regno],
1146 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1147 INSN_UID (first_insn), INSN_UID (last_insn));
1148
1149 if (GET_CODE (first_insn) == NOTE)
1150 first_insn = next_nonnote_insn (first_insn);
1151
1152 last_insn = next_nonnote_insn (last_insn);
1153 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1154 {
1155 if (GET_CODE (insn) == INSN)
1156 {
1157 rtx pattern = single_set (insn);
1158
1159 /* Replace the memory references. */
1160 if (pattern)
1161 {
1162 rtx *p_mem;
1163 /* Memory operands are signed by default. */
1164 int unsignedp = FALSE;
1165
1166 if (GET_CODE (SET_DEST (pattern)) == MEM
1167 && GET_CODE (SET_SRC (pattern)) == MEM)
1168 p_mem = (rtx *)0;
1169
1170 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1171 p_mem = &SET_DEST (pattern);
1172
1173 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1174 p_mem = &SET_SRC (pattern);
1175
1176 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1177 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1178 p_mem = &XEXP (SET_SRC (pattern), 0);
1179
1180 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1181 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1182 {
1183 p_mem = &XEXP (SET_SRC (pattern), 0);
1184 unsignedp = TRUE;
1185 }
1186 else
1187 p_mem = (rtx *)0;
1188
1189 if (p_mem)
1190 {
1191 rtx addr = XEXP (*p_mem, 0);
1192
1193 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1194 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1195
1196 else if (GET_CODE (addr) == PLUS
1197 && GET_CODE (XEXP (addr, 0)) == REG
1198 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1199 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1200 && ((INTVAL (XEXP (addr, 1)))
1201 < ep_memory_offset (GET_MODE (*p_mem),
1202 unsignedp))
1203 && ((INTVAL (XEXP (addr, 1))) >= 0))
1204 *p_mem = change_address (*p_mem, VOIDmode,
1205 gen_rtx_PLUS (Pmode,
1206 *p_ep,
1207 XEXP (addr, 1)));
1208 }
1209 }
1210 }
1211 }
1212
1213 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1214 insn = prev_nonnote_insn (first_insn);
1215 if (insn && GET_CODE (insn) == INSN
1216 && GET_CODE (PATTERN (insn)) == SET
1217 && SET_DEST (PATTERN (insn)) == *p_ep
1218 && SET_SRC (PATTERN (insn)) == *p_r1)
1219 delete_insn (insn);
1220 else
1221 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1222
1223 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1224 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1225 }
1226
1227
1228 /* Implement TARGET_MACHINE_DEPENDENT_REORG. On the V850, we use it for the
1229 -mep mode: copy heavily used pointers into the ep register so that the
1230 short sld/sst forms with implicit ep addressing can be used. */
1231
1232 static void
1233 v850_reorg (void)
1234 {
1235 struct
1236 {
1237 int uses;
1238 rtx first_insn;
1239 rtx last_insn;
1240 }
1241 regs[FIRST_PSEUDO_REGISTER];
1242
1243 int i;
1244 int use_ep = FALSE;
1245 rtx r1 = NULL_RTX;
1246 rtx ep = NULL_RTX;
1247 rtx insn;
1248 rtx pattern;
1249
1250 /* If not ep mode, just return now. */
1251 if (!TARGET_EP)
1252 return;
1253
1254 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1255 {
1256 regs[i].uses = 0;
1257 regs[i].first_insn = NULL_RTX;
1258 regs[i].last_insn = NULL_RTX;
1259 }
1260
1261 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1262 {
1263 switch (GET_CODE (insn))
1264 {
1265 /* End of basic block */
1266 default:
1267 if (!use_ep)
1268 {
1269 int max_uses = -1;
1270 int max_regno = -1;
1271
1272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1273 {
1274 if (max_uses < regs[i].uses)
1275 {
1276 max_uses = regs[i].uses;
1277 max_regno = i;
1278 }
1279 }
1280
1281 if (max_uses > 3)
1282 substitute_ep_register (regs[max_regno].first_insn,
1283 regs[max_regno].last_insn,
1284 max_uses, max_regno, &r1, &ep);
1285 }
1286
1287 use_ep = FALSE;
1288 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1289 {
1290 regs[i].uses = 0;
1291 regs[i].first_insn = NULL_RTX;
1292 regs[i].last_insn = NULL_RTX;
1293 }
1294 break;
1295
1296 case NOTE:
1297 break;
1298
1299 case INSN:
1300 pattern = single_set (insn);
1301
1302 /* See if there are any memory references we can shorten */
1303 if (pattern)
1304 {
1305 rtx src = SET_SRC (pattern);
1306 rtx dest = SET_DEST (pattern);
1307 rtx mem;
1308 /* Memory operands are signed by default. */
1309 int unsignedp = FALSE;
1310
1311 /* We might have (SUBREG (MEM)) here, so just get rid of the
1312 subregs to make this code simpler. */
1313 if (GET_CODE (dest) == SUBREG
1314 && (GET_CODE (SUBREG_REG (dest)) == MEM
1315 || GET_CODE (SUBREG_REG (dest)) == REG))
1316 alter_subreg (&dest);
1317 if (GET_CODE (src) == SUBREG
1318 && (GET_CODE (SUBREG_REG (src)) == MEM
1319 || GET_CODE (SUBREG_REG (src)) == REG))
1320 alter_subreg (&src);
1321
1322 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1323 mem = NULL_RTX;
1324
1325 else if (GET_CODE (dest) == MEM)
1326 mem = dest;
1327
1328 else if (GET_CODE (src) == MEM)
1329 mem = src;
1330
1331 else if (GET_CODE (src) == SIGN_EXTEND
1332 && GET_CODE (XEXP (src, 0)) == MEM)
1333 mem = XEXP (src, 0);
1334
1335 else if (GET_CODE (src) == ZERO_EXTEND
1336 && GET_CODE (XEXP (src, 0)) == MEM)
1337 {
1338 mem = XEXP (src, 0);
1339 unsignedp = TRUE;
1340 }
1341 else
1342 mem = NULL_RTX;
1343
1344 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1345 use_ep = TRUE;
1346
1347 else if (!use_ep && mem
1348 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1349 {
1350 rtx addr = XEXP (mem, 0);
1351 int regno = -1;
1352 int short_p;
1353
1354 if (GET_CODE (addr) == REG)
1355 {
1356 short_p = TRUE;
1357 regno = REGNO (addr);
1358 }
1359
1360 else if (GET_CODE (addr) == PLUS
1361 && GET_CODE (XEXP (addr, 0)) == REG
1362 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1363 && ((INTVAL (XEXP (addr, 1)))
1364 < ep_memory_offset (GET_MODE (mem), unsignedp))
1365 && ((INTVAL (XEXP (addr, 1))) >= 0))
1366 {
1367 short_p = TRUE;
1368 regno = REGNO (XEXP (addr, 0));
1369 }
1370
1371 else
1372 short_p = FALSE;
1373
1374 if (short_p)
1375 {
1376 regs[regno].uses++;
1377 regs[regno].last_insn = insn;
1378 if (!regs[regno].first_insn)
1379 regs[regno].first_insn = insn;
1380 }
1381 }
1382
1383 /* Loading up a register in the basic block zaps any savings
1384 for the register */
1385 if (GET_CODE (dest) == REG)
1386 {
1387 enum machine_mode mode = GET_MODE (dest);
1388 int regno;
1389 int endregno;
1390
1391 regno = REGNO (dest);
1392 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1393
1394 if (!use_ep)
1395 {
1396 /* See if we can use the pointer before this
1397 modification. */
1398 int max_uses = -1;
1399 int max_regno = -1;
1400
1401 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1402 {
1403 if (max_uses < regs[i].uses)
1404 {
1405 max_uses = regs[i].uses;
1406 max_regno = i;
1407 }
1408 }
1409
1410 if (max_uses > 3
1411 && max_regno >= regno
1412 && max_regno < endregno)
1413 {
1414 substitute_ep_register (regs[max_regno].first_insn,
1415 regs[max_regno].last_insn,
1416 max_uses, max_regno, &r1,
1417 &ep);
1418
1419 /* Since we made a substitution, zap all remembered
1420 registers. */
1421 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1422 {
1423 regs[i].uses = 0;
1424 regs[i].first_insn = NULL_RTX;
1425 regs[i].last_insn = NULL_RTX;
1426 }
1427 }
1428 }
1429
1430 for (i = regno; i < endregno; i++)
1431 {
1432 regs[i].uses = 0;
1433 regs[i].first_insn = NULL_RTX;
1434 regs[i].last_insn = NULL_RTX;
1435 }
1436 }
1437 }
1438 }
1439 }
1440 }
1441
1442
1443 /* # of registers saved by the interrupt handler. */
1444 #define INTERRUPT_FIXED_NUM 4
1445
1446 /* # of bytes for registers saved by the interrupt handler. */
1447 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1448
1449 /* # of registers saved in register parameter area. */
1450 #define INTERRUPT_REGPARM_NUM 4
1451 /* # of words saved for other registers. */
1452 #define INTERRUPT_ALL_SAVE_NUM \
1453 (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)
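/* With the definitions above this is (30 - 4 + 4) = 30 words, so
   INTERRUPT_ALL_SAVE_SIZE below is 120 bytes.  */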
1454
1455 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1456
1457 int
1458 compute_register_save_size (long * p_reg_saved)
1459 {
1460 int size = 0;
1461 int i;
1462 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1463 int call_p = regs_ever_live [LINK_POINTER_REGNUM];
1464 long reg_saved = 0;
1465
1466 /* Count the return pointer if we need to save it. */
1467 if (current_function_profile && !call_p)
1468 regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;
1469
1470 /* Count space for the register saves. */
1471 if (interrupt_handler)
1472 {
1473 for (i = 0; i <= 31; i++)
1474 switch (i)
1475 {
1476 default:
1477 if (regs_ever_live[i] || call_p)
1478 {
1479 size += 4;
1480 reg_saved |= 1L << i;
1481 }
1482 break;
1483
1484 /* We don't save/restore r0 or the stack pointer */
1485 case 0:
1486 case STACK_POINTER_REGNUM:
1487 break;
1488
1489 /* For registers with fixed use, we save them, set them to the
1490 appropriate value, and then restore them.
1491 These registers are handled specially, so don't list them
1492 on the list of registers to save in the prologue. */
1493 case 1: /* temp used to hold ep */
1494 case 4: /* gp */
1495 case 10: /* temp used to call interrupt save/restore */
1496 case EP_REGNUM: /* ep */
1497 size += 4;
1498 break;
1499 }
1500 }
1501 else
1502 {
1503 /* Find the first register that needs to be saved. */
1504 for (i = 0; i <= 31; i++)
1505 if (regs_ever_live[i] && ((! call_used_regs[i])
1506 || i == LINK_POINTER_REGNUM))
1507 break;
1508
1509 /* If it is possible that an out-of-line helper function might be
1510 used to generate the prologue for the current function, then we
1511 need to cover the possibility that such a helper function will
1512 be used, despite the fact that there might be gaps in the list of
1513 registers that need to be saved. To detect this we note that the
1514 helper functions always push at least register r29 (provided
1515 that the function is not an interrupt handler). */
1516
1517 if (TARGET_PROLOG_FUNCTION
1518 && (i == 2 || ((i >= 20) && (i < 30))))
1519 {
1520 if (i == 2)
1521 {
1522 size += 4;
1523 reg_saved |= 1L << i;
1524
1525 i = 20;
1526 }
1527
1528 /* Helper functions save all registers between the starting
1529 register and the last register, regardless of whether they
1530 are actually used by the function or not. */
1531 for (; i <= 29; i++)
1532 {
1533 size += 4;
1534 reg_saved |= 1L << i;
1535 }
1536
1537 if (regs_ever_live [LINK_POINTER_REGNUM])
1538 {
1539 size += 4;
1540 reg_saved |= 1L << LINK_POINTER_REGNUM;
1541 }
1542 }
1543 else
1544 {
1545 for (; i <= 31; i++)
1546 if (regs_ever_live[i] && ((! call_used_regs[i])
1547 || i == LINK_POINTER_REGNUM))
1548 {
1549 size += 4;
1550 reg_saved |= 1L << i;
1551 }
1552 }
1553 }
1554
1555 if (p_reg_saved)
1556 *p_reg_saved = reg_saved;
1557
1558 return size;
1559 }
1560
1561 int
1562 compute_frame_size (int size, long * p_reg_saved)
1563 {
1564 return (size
1565 + compute_register_save_size (p_reg_saved)
1566 + current_function_outgoing_args_size);
1567 }
1568
1569
1570 void
1571 expand_prologue (void)
1572 {
1573 unsigned int i;
1574 int offset;
1575 unsigned int size = get_frame_size ();
1576 unsigned int actual_fsize;
1577 unsigned int init_stack_alloc = 0;
1578 rtx save_regs[32];
1579 rtx save_all;
1580 unsigned int num_save;
1581 unsigned int default_stack;
1582 int code;
1583 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1584 long reg_saved = 0;
1585
1586 actual_fsize = compute_frame_size (size, &reg_saved);
1587
1588 /* Save/setup global registers for interrupt functions right now. */
1589 if (interrupt_handler)
1590 {
1591 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1592 emit_insn (gen_callt_save_interrupt ());
1593 else
1594 emit_insn (gen_save_interrupt ());
1595
1596 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1597
1598 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1599 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1600 }
1601
1602 /* Save arg registers to the stack if necessary. */
1603 else if (current_function_args_info.anonymous_args)
1604 {
1605 if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT)
1606 emit_insn (gen_save_r6_r9_v850e ());
1607 else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS)
1608 emit_insn (gen_save_r6_r9 ());
1609 else
1610 {
1611 offset = 0;
1612 for (i = 6; i < 10; i++)
1613 {
1614 emit_move_insn (gen_rtx_MEM (SImode,
1615 plus_constant (stack_pointer_rtx,
1616 offset)),
1617 gen_rtx_REG (SImode, i));
1618 offset += 4;
1619 }
1620 }
1621 }
1622
1623 /* Identify all of the saved registers. */
1624 num_save = 0;
1625 default_stack = 0;
1626 for (i = 1; i < 31; i++)
1627 {
1628 if (((1L << i) & reg_saved) != 0)
1629 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1630 }
1631
1632 /* If the return pointer is saved, the helper functions also allocate
1633 16 bytes of stack for arguments to be saved in. */
1634 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1635 {
1636 save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1637 default_stack = 16;
1638 }
1639
1640 /* See if we have an insn that allocates stack space and saves the particular
1641 registers we want to. */
1642 save_all = NULL_RTX;
1643 if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
1644 {
1645 int alloc_stack = (4 * num_save) + default_stack;
1646 int unalloc_stack = actual_fsize - alloc_stack;
1647 int save_func_len = 4;
1648 int save_normal_len;
1649
1650 if (unalloc_stack)
1651 save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1652
1653 /* see if we would have used ep to save the stack */
1654 if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
1655 save_normal_len = (3 * 2) + (2 * num_save);
1656 else
1657 save_normal_len = 4 * num_save;
1658
1659 save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;
1660
1661 /* Don't bother checking if we don't actually save any space.
1662 This happens for instance if one register is saved and additional
1663 stack space is allocated. */
1664 if (save_func_len < save_normal_len)
1665 {
1666 save_all = gen_rtx_PARALLEL
1667 (VOIDmode,
1668 rtvec_alloc (num_save + 1
1669 + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1670
1671 XVECEXP (save_all, 0, 0)
1672 = gen_rtx_SET (VOIDmode,
1673 stack_pointer_rtx,
1674 plus_constant (stack_pointer_rtx, -alloc_stack));
1675
1676 offset = - default_stack;
1677 for (i = 0; i < num_save; i++)
1678 {
1679 XVECEXP (save_all, 0, i+1)
1680 = gen_rtx_SET (VOIDmode,
1681 gen_rtx_MEM (Pmode,
1682 plus_constant (stack_pointer_rtx,
1683 offset)),
1684 save_regs[i]);
1685 offset -= 4;
1686 }
1687
1688 if (TARGET_V850)
1689 {
1690 XVECEXP (save_all, 0, num_save + 1)
1691 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1692
1693 if (TARGET_LONG_CALLS)
1694 XVECEXP (save_all, 0, num_save + 2)
1695 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1696 }
1697
1698 code = recog (save_all, NULL_RTX, NULL);
1699 if (code >= 0)
1700 {
1701 rtx insn = emit_insn (save_all);
1702 INSN_CODE (insn) = code;
1703 actual_fsize -= alloc_stack;
1704
1705 if (TARGET_DEBUG)
1706 fprintf (stderr, "\
1707 Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
1708 save_normal_len - save_func_len,
1709 save_normal_len, save_func_len,
1710 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1711 }
1712 else
1713 save_all = NULL_RTX;
1714 }
1715 }
1716
1717 /* If no prolog save function is available, store the registers the old
1718 fashioned way (one by one). */
1719 if (!save_all)
1720 {
1721 /* Special case interrupt functions that save all registers for a call. */
1722 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1723 {
1724 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1725 emit_insn (gen_callt_save_all_interrupt ());
1726 else
1727 emit_insn (gen_save_all_interrupt ());
1728 }
1729 else
1730 {
1731 /* If the stack is too big, allocate it in chunks so we can do the
1732 register saves. We initially allocate just the register save area so
1733 that the saves stay within reach of ep-relative addressing. */
1734 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1735 init_stack_alloc = compute_register_save_size (NULL);
1736 else
1737 init_stack_alloc = actual_fsize;
1738
1739 /* Save registers at the beginning of the stack frame. */
1740 offset = init_stack_alloc - 4;
1741
1742 if (init_stack_alloc)
1743 emit_insn (gen_addsi3 (stack_pointer_rtx,
1744 stack_pointer_rtx,
1745 GEN_INT (-init_stack_alloc)));
1746
1747 /* Save the return pointer first. */
1748 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1749 {
1750 emit_move_insn (gen_rtx_MEM (SImode,
1751 plus_constant (stack_pointer_rtx,
1752 offset)),
1753 save_regs[--num_save]);
1754 offset -= 4;
1755 }
1756
1757 for (i = 0; i < num_save; i++)
1758 {
1759 emit_move_insn (gen_rtx_MEM (SImode,
1760 plus_constant (stack_pointer_rtx,
1761 offset)),
1762 save_regs[i]);
1763 offset -= 4;
1764 }
1765 }
1766 }
1767
1768 /* Allocate the rest of the stack that was not allocated above (either it is
1769 > 32K or we just called a function to save the registers and needed more
1770 stack). */
1771 if (actual_fsize > init_stack_alloc)
1772 {
1773 int diff = actual_fsize - init_stack_alloc;
1774 if (CONST_OK_FOR_K (diff))
1775 emit_insn (gen_addsi3 (stack_pointer_rtx,
1776 stack_pointer_rtx,
1777 GEN_INT (-diff)));
1778 else
1779 {
1780 rtx reg = gen_rtx_REG (Pmode, 12);
1781 emit_move_insn (reg, GEN_INT (-diff));
1782 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1783 }
1784 }
1785
1786 /* If we need a frame pointer, set it up now. */
1787 if (frame_pointer_needed)
1788 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1789 }
1790
1791
1792 void
1793 expand_epilogue (void)
1794 {
1795 unsigned int i;
1796 int offset;
1797 unsigned int size = get_frame_size ();
1798 long reg_saved = 0;
1799 unsigned int actual_fsize = compute_frame_size (size, &reg_saved);
1800 unsigned int init_stack_free = 0;
1801 rtx restore_regs[32];
1802 rtx restore_all;
1803 unsigned int num_restore;
1804 unsigned int default_stack;
1805 int code;
1806 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1807
1808 /* Eliminate the initial stack stored by interrupt functions. */
1809 if (interrupt_handler)
1810 {
1811 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1812 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1813 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1814 }
1815
1816 /* Cut off any dynamic stack created. */
1817 if (frame_pointer_needed)
1818 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1819
1820 /* Identify all of the saved registers. */
1821 num_restore = 0;
1822 default_stack = 0;
1823 for (i = 1; i < 31; i++)
1824 {
1825 if (((1L << i) & reg_saved) != 0)
1826 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1827 }
1828
1829 /* If the return pointer is saved, the helper functions also allocate
1830 16 bytes of stack for arguments to be saved in. */
1831 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1832 {
1833 restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1834 default_stack = 16;
1835 }
1836
1837 /* See if we have an insn that restores the particular registers we
1838 want to. */
1839 restore_all = NULL_RTX;
1840
1841 if (TARGET_PROLOG_FUNCTION
1842 && num_restore > 0
1843 && actual_fsize >= default_stack
1844 && !interrupt_handler)
1845 {
1846 int alloc_stack = (4 * num_restore) + default_stack;
1847 int unalloc_stack = actual_fsize - alloc_stack;
1848 int restore_func_len = 4;
1849 int restore_normal_len;
1850
1851 if (unalloc_stack)
1852 restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1853
1854 /* See if we would have used ep to restore the registers. */
1855 if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
1856 restore_normal_len = (3 * 2) + (2 * num_restore);
1857 else
1858 restore_normal_len = 4 * num_restore;
1859
1860 restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;
1861
1862 /* Don't bother checking if we don't actually save any space. */
1863 if (restore_func_len < restore_normal_len)
1864 {
1865 restore_all = gen_rtx_PARALLEL (VOIDmode,
1866 rtvec_alloc (num_restore + 2));
1867 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1868 XVECEXP (restore_all, 0, 1)
1869 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1870 gen_rtx_PLUS (Pmode,
1871 stack_pointer_rtx,
1872 GEN_INT (alloc_stack)));
1873
1874 offset = alloc_stack - 4;
1875 for (i = 0; i < num_restore; i++)
1876 {
1877 XVECEXP (restore_all, 0, i+2)
1878 = gen_rtx_SET (VOIDmode,
1879 restore_regs[i],
1880 gen_rtx_MEM (Pmode,
1881 plus_constant (stack_pointer_rtx,
1882 offset)));
1883 offset -= 4;
1884 }
1885
1886 code = recog (restore_all, NULL_RTX, NULL);
1887
1888 if (code >= 0)
1889 {
1890 rtx insn;
1891
1892 actual_fsize -= alloc_stack;
1893 if (actual_fsize)
1894 {
1895 if (CONST_OK_FOR_K (actual_fsize))
1896 emit_insn (gen_addsi3 (stack_pointer_rtx,
1897 stack_pointer_rtx,
1898 GEN_INT (actual_fsize)));
1899 else
1900 {
1901 rtx reg = gen_rtx_REG (Pmode, 12);
1902 emit_move_insn (reg, GEN_INT (actual_fsize));
1903 emit_insn (gen_addsi3 (stack_pointer_rtx,
1904 stack_pointer_rtx,
1905 reg));
1906 }
1907 }
1908
1909 insn = emit_jump_insn (restore_all);
1910 INSN_CODE (insn) = code;
1911
1912 if (TARGET_DEBUG)
1913 fprintf (stderr, "\
1914 Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
1915 restore_normal_len - restore_func_len,
1916 restore_normal_len, restore_func_len,
1917 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1918 }
1919 else
1920 restore_all = NULL_RTX;
1921 }
1922 }
1923
1924 /* If no epilog save function is available, restore the registers the
1925 old fashioned way (one by one). */
1926 if (!restore_all)
1927 {
1928 /* If the stack is large, we need to cut it down in 2 pieces. */
1929 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1930 init_stack_free = 4 * num_restore;
1931 else
1932 init_stack_free = actual_fsize;
1933
1934 /* Deallocate the rest of the stack if it is > 32K. */
1935 if (actual_fsize > init_stack_free)
1936 {
1937 int diff;
1938
1939 diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);
1940
1941 if (CONST_OK_FOR_K (diff))
1942 emit_insn (gen_addsi3 (stack_pointer_rtx,
1943 stack_pointer_rtx,
1944 GEN_INT (diff)));
1945 else
1946 {
1947 rtx reg = gen_rtx_REG (Pmode, 12);
1948 emit_move_insn (reg, GEN_INT (diff));
1949 emit_insn (gen_addsi3 (stack_pointer_rtx,
1950 stack_pointer_rtx,
1951 reg));
1952 }
1953 }
1954
1955 /* Special case interrupt functions that save all registers
1956 for a call. */
1957 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1958 {
1959 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1960 emit_insn (gen_callt_restore_all_interrupt ());
1961 else
1962 emit_insn (gen_restore_all_interrupt ());
1963 }
1964 else
1965 {
1966 /* Restore registers from the beginning of the stack frame. */
1967 offset = init_stack_free - 4;
1968
1969 /* Restore the return pointer first. */
1970 if (num_restore > 0
1971 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1972 {
1973 emit_move_insn (restore_regs[--num_restore],
1974 gen_rtx_MEM (SImode,
1975 plus_constant (stack_pointer_rtx,
1976 offset)));
1977 offset -= 4;
1978 }
1979
1980 for (i = 0; i < num_restore; i++)
1981 {
1982 emit_move_insn (restore_regs[i],
1983 gen_rtx_MEM (SImode,
1984 plus_constant (stack_pointer_rtx,
1985 offset)));
1986
1987 emit_insn (gen_rtx_USE (VOIDmode, restore_regs[i]));
1988 offset -= 4;
1989 }
1990
1991 /* Cut back the remainder of the stack. */
1992 if (init_stack_free)
1993 emit_insn (gen_addsi3 (stack_pointer_rtx,
1994 stack_pointer_rtx,
1995 GEN_INT (init_stack_free)));
1996 }
1997
1998 /* And return or use reti for interrupt handlers. */
1999 if (interrupt_handler)
2000 {
2001 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
2002 emit_insn (gen_callt_return_interrupt ());
2003 else
2004 emit_jump_insn (gen_return_interrupt ());
2005 }
2006 else if (actual_fsize)
2007 emit_jump_insn (gen_return_internal ());
2008 else
2009 emit_jump_insn (gen_return ());
2010 }
2011
2012 v850_interrupt_cache_p = FALSE;
2013 v850_interrupt_p = FALSE;
2014 }
2015
2016
2017 /* Update the condition code from the insn. */
2018
2019 void
2020 notice_update_cc (rtx body, rtx insn)
2021 {
2022 switch (get_attr_cc (insn))
2023 {
2024 case CC_NONE:
2025 /* Insn does not affect CC at all. */
2026 break;
2027
2028 case CC_NONE_0HIT:
2029 /* Insn does not change CC, but the 0'th operand has been changed. */
2030 if (cc_status.value1 != 0
2031 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2032 cc_status.value1 = 0;
2033 break;
2034
2035 case CC_SET_ZN:
2036 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2037 V,C is in an unusable state. */
2038 CC_STATUS_INIT;
2039 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2040 cc_status.value1 = recog_data.operand[0];
2041 break;
2042
2043 case CC_SET_ZNV:
2044 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2045 C is in an unusable state. */
2046 CC_STATUS_INIT;
2047 cc_status.flags |= CC_NO_CARRY;
2048 cc_status.value1 = recog_data.operand[0];
2049 break;
2050
2051 case CC_COMPARE:
2052 /* The insn is a compare instruction. */
2053 CC_STATUS_INIT;
2054 cc_status.value1 = SET_SRC (body);
2055 break;
2056
2057 case CC_CLOBBER:
2058 /* Insn doesn't leave CC in a usable state. */
2059 CC_STATUS_INIT;
2060 break;
2061 }
2062 }
2063
2064 /* Retrieve the data area that has been chosen for the given decl. */
2065
2066 v850_data_area
2067 v850_get_data_area (tree decl)
2068 {
2069 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2070 return DATA_AREA_SDA;
2071
2072 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2073 return DATA_AREA_TDA;
2074
2075 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2076 return DATA_AREA_ZDA;
2077
2078 return DATA_AREA_NORMAL;
2079 }
2080
2081 /* Store the indicated data area in the decl's attributes. */
2082
2083 static void
2084 v850_set_data_area (tree decl, v850_data_area data_area)
2085 {
2086 tree name;
2087
2088 switch (data_area)
2089 {
2090 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2091 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2092 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2093 default:
2094 return;
2095 }
2096
2097 DECL_ATTRIBUTES (decl) = tree_cons
2098 (name, NULL, DECL_ATTRIBUTES (decl));
2099 }
2100
2101 const struct attribute_spec v850_attribute_table[] =
2102 {
2103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2104 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
2105 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
2106 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2107 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2108 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2109 { NULL, 0, 0, false, false, false, NULL }
2110 };
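/* Example of how these attributes appear in user code (illustrative
   only, not part of this file):

       int counter __attribute__ ((tda));               -- tiny data area
       static char log_buf[128] __attribute__ ((sda));  -- small data area
       int flags __attribute__ ((zda));                 -- zero data area
       void timer_tick (void) __attribute__ ((interrupt_handler));

   The handlers below validate such uses and record the chosen data
   area in DECL_ATTRIBUTES.  */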
2111
2112 /* Handle an "interrupt" attribute; arguments as in
2113 struct attribute_spec.handler. */
2114 static tree
2115 v850_handle_interrupt_attribute (tree * node,
2116 tree name,
2117 tree args ATTRIBUTE_UNUSED,
2118 int flags ATTRIBUTE_UNUSED,
2119 bool * no_add_attrs)
2120 {
2121 if (TREE_CODE (*node) != FUNCTION_DECL)
2122 {
2123 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2124 IDENTIFIER_POINTER (name));
2125 *no_add_attrs = true;
2126 }
2127
2128 return NULL_TREE;
2129 }
2130
2131 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2132 struct attribute_spec.handler. */
2133 static tree
2134 v850_handle_data_area_attribute (tree * node,
2135 tree name,
2136 tree args ATTRIBUTE_UNUSED,
2137 int flags ATTRIBUTE_UNUSED,
2138 bool * no_add_attrs)
2139 {
2140 v850_data_area data_area;
2141 v850_data_area area;
2142 tree decl = *node;
2143
2144 /* Implement data area attribute. */
2145 if (is_attribute_p ("sda", name))
2146 data_area = DATA_AREA_SDA;
2147 else if (is_attribute_p ("tda", name))
2148 data_area = DATA_AREA_TDA;
2149 else if (is_attribute_p ("zda", name))
2150 data_area = DATA_AREA_ZDA;
2151 else
2152 gcc_unreachable ();
2153
2154 switch (TREE_CODE (decl))
2155 {
2156 case VAR_DECL:
2157 if (current_function_decl != NULL_TREE)
2158 {
2159 error ("%Jdata area attributes cannot be specified for "
2160 "local variables", decl);
2161 *no_add_attrs = true;
2162 }
2163
2164 /* Drop through. */
2165
2166 case FUNCTION_DECL:
2167 area = v850_get_data_area (decl);
2168 if (area != DATA_AREA_NORMAL && data_area != area)
2169 {
2170 error ("data area of %q+D conflicts with previous declaration",
2171 decl);
2172 *no_add_attrs = true;
2173 }
2174 break;
2175
2176 default:
2177 break;
2178 }
2179
2180 return NULL_TREE;
2181 }
2182
2183
2184 /* Return nonzero if FUNC is an interrupt function as specified
2185 by the "interrupt" attribute. */
2186
2187 int
2188 v850_interrupt_function_p (tree func)
2189 {
2190 tree a;
2191 int ret = 0;
2192
2193 if (v850_interrupt_cache_p)
2194 return v850_interrupt_p;
2195
2196 if (TREE_CODE (func) != FUNCTION_DECL)
2197 return 0;
2198
2199 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2200 if (a != NULL_TREE)
2201 ret = 1;
2202
2203 else
2204 {
2205 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2206 ret = a != NULL_TREE;
2207 }
2208
2209 /* It is not safe to trust global variables until after function inlining has
2210 been done. */
2211 if (reload_completed || reload_in_progress)
2212 v850_interrupt_p = ret;
2213
2214 return ret;
2215 }
2216
2217
2218 static void
2219 v850_encode_data_area (tree decl, rtx symbol)
2220 {
2221 int flags;
2222
2223 /* Map explicit sections into the appropriate attribute */
2224 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2225 {
2226 if (DECL_SECTION_NAME (decl))
2227 {
2228 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2229
2230 if (streq (name, ".zdata") || streq (name, ".zbss"))
2231 v850_set_data_area (decl, DATA_AREA_ZDA);
2232
2233 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2234 v850_set_data_area (decl, DATA_AREA_SDA);
2235
2236 else if (streq (name, ".tdata"))
2237 v850_set_data_area (decl, DATA_AREA_TDA);
2238 }
2239
2240 /* If no attribute, support -m{zda,sda,tda}=n */
2241 else
2242 {
2243 int size = int_size_in_bytes (TREE_TYPE (decl));
2244 if (size <= 0)
2245 ;
2246
2247 else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
2248 v850_set_data_area (decl, DATA_AREA_TDA);
2249
2250 else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
2251 v850_set_data_area (decl, DATA_AREA_SDA);
2252
2253 else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
2254 v850_set_data_area (decl, DATA_AREA_ZDA);
2255 }
2256
2257 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2258 return;
2259 }
2260
2261 flags = SYMBOL_REF_FLAGS (symbol);
2262 switch (v850_get_data_area (decl))
2263 {
2264 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2265 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2266 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2267 default: gcc_unreachable ();
2268 }
2269 SYMBOL_REF_FLAGS (symbol) = flags;
2270 }
2271
2272 static void
2273 v850_encode_section_info (tree decl, rtx rtl, int first)
2274 {
2275 default_encode_section_info (decl, rtl, first);
2276
2277 if (TREE_CODE (decl) == VAR_DECL
2278 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2279 v850_encode_data_area (decl, XEXP (rtl, 0));
2280 }
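/* Illustrative example (not part of this file): a declaration that
   names one of the recognized sections explicitly, e.g.

       int table[4] __attribute__ ((section (".zdata")));

   is treated by v850_encode_data_area as if it carried the "zda"
   attribute, so its SYMBOL_REF ends up with SYMBOL_FLAG_ZDA set.  */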
2281
2282 /* Construct a JR instruction to a routine that will perform the equivalent of
2283 the RTL passed in as an argument. This RTL is a function epilogue that
2284 pops registers off the stack and possibly releases some extra stack space
2285 as well. The code has already verified that the RTL matches these
2286 requirements. */
2287 char *
2288 construct_restore_jr (rtx op)
2289 {
2290 int count = XVECLEN (op, 0);
2291 int stack_bytes;
2292 unsigned long int mask;
2293 unsigned long int first;
2294 unsigned long int last;
2295 int i;
2296 static char buff [100]; /* XXX */
2297
2298 if (count <= 2)
2299 {
2300 error ("bogus JR construction: %d", count);
2301 return NULL;
2302 }
2303
2304 /* Work out how many bytes to pop off the stack before retrieving
2305 registers. */
2306 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2307 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2308 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2309
2310 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2311
2312 /* Each pop will remove 4 bytes from the stack.... */
2313 stack_bytes -= (count - 2) * 4;
2314
2315 /* Make sure that the amount we are popping is either 0 or 16 bytes. */
2316 if (stack_bytes != 0 && stack_bytes != 16)
2317 {
2318 error ("bad amount of stack space removal: %d", stack_bytes);
2319 return NULL;
2320 }
2321
2322 /* Now compute the bit mask of registers to pop. */
2323 mask = 0;
2324 for (i = 2; i < count; i++)
2325 {
2326 rtx vector_element = XVECEXP (op, 0, i);
2327
2328 gcc_assert (GET_CODE (vector_element) == SET);
2329 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2330 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2331 SImode));
2332
2333 mask |= 1 << REGNO (SET_DEST (vector_element));
2334 }
2335
2336 /* Scan for the first register to pop. */
2337 for (first = 0; first < 32; first++)
2338 {
2339 if (mask & (1 << first))
2340 break;
2341 }
2342
2343 gcc_assert (first < 32);
2344
2345 /* Discover the last register to pop. */
2346 if (mask & (1 << LINK_POINTER_REGNUM))
2347 {
2348 gcc_assert (stack_bytes == 16);
2349
2350 last = LINK_POINTER_REGNUM;
2351 }
2352 else
2353 {
2354 gcc_assert (!stack_bytes);
2355 gcc_assert (mask & (1 << 29));
2356
2357 last = 29;
2358 }
2359
2360 /* Note, it is possible to have gaps in the register mask.
2361 We ignore this here, and generate a JR anyway. We will
2362 be popping more registers than is strictly necessary, but
2363 it does save code space. */
2364
2365 if (TARGET_LONG_CALLS)
2366 {
2367 char name[40];
2368
2369 if (first == last)
2370 sprintf (name, "__return_%s", reg_names [first]);
2371 else
2372 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2373
2374 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2375 name, name);
2376 }
2377 else
2378 {
2379 if (first == last)
2380 sprintf (buff, "jr __return_%s", reg_names [first]);
2381 else
2382 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2383 }
2384
2385 return buff;
2386 }
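/* Illustrative example (not part of this file): for an epilogue that
   pops only r29 and releases no extra stack space, the string built
   above is simply

       jr __return_r29

   while under -mlong-calls the same epilogue becomes the longer
   movhi/movea/jmp sequence through r6.  */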
2387
2388
2389 /* Construct a JARL instruction to a routine that will perform the equivalent
2390 of the RTL passed as a parameter. This RTL is a function prologue that
2391 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2392 some stack space as well. The code has already verified that the RTL
2393 matches these requirements. */
2394 char *
2395 construct_save_jarl (rtx op)
2396 {
2397 int count = XVECLEN (op, 0);
2398 int stack_bytes;
2399 unsigned long int mask;
2400 unsigned long int first;
2401 unsigned long int last;
2402 int i;
2403 static char buff [100]; /* XXX */
2404
2405 if (count <= 2)
2406 {
2407 error ("bogus JARL construction: %d\n", count);
2408 return NULL;
2409 }
2410
2411 /* Paranoia. */
2412 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2413 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2414 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2415 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2416
2417 /* Work out how many bytes to push onto the stack after storing the
2418 registers. */
2419 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2420
2421 /* Each push will occupy 4 bytes on the stack. */
2422 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2423
2424 /* Make sure that the amount of stack space we are acquiring is either 0 or 16 bytes. */
2425 if (stack_bytes != 0 && stack_bytes != -16)
2426 {
2427 error ("bad amount of stack space removal: %d", stack_bytes);
2428 return NULL;
2429 }
2430
2431 /* Now compute the bit mask of registers to push. */
2432 mask = 0;
2433 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2434 {
2435 rtx vector_element = XVECEXP (op, 0, i);
2436
2437 gcc_assert (GET_CODE (vector_element) == SET);
2438 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2439 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2440 SImode));
2441
2442 mask |= 1 << REGNO (SET_SRC (vector_element));
2443 }
2444
2445 /* Scan for the first register to push. */
2446 for (first = 0; first < 32; first++)
2447 {
2448 if (mask & (1 << first))
2449 break;
2450 }
2451
2452 gcc_assert (first < 32);
2453
2454 /* Discover the last register to push. */
2455 if (mask & (1 << LINK_POINTER_REGNUM))
2456 {
2457 gcc_assert (stack_bytes == -16);
2458
2459 last = LINK_POINTER_REGNUM;
2460 }
2461 else
2462 {
2463 gcc_assert (!stack_bytes);
2464 gcc_assert (mask & (1 << 29));
2465
2466 last = 29;
2467 }
2468
2469 /* Note, it is possible to have gaps in the register mask.
2470 We ignore this here, and generate a JARL anyway. We will
2471 be pushing more registers than is strictly necessary, but
2472 it does save code space. */
2473
2474 if (TARGET_LONG_CALLS)
2475 {
2476 char name[40];
2477
2478 if (first == last)
2479 sprintf (name, "__save_%s", reg_names [first]);
2480 else
2481 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2482
2483 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2484 name, name);
2485 }
2486 else
2487 {
2488 if (first == last)
2489 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2490 else
2491 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2492 reg_names [last]);
2493 }
2494
2495 return buff;
2496 }
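/* Illustrative example (not part of this file): the matching prologue
   case, saving only r29 and acquiring no extra stack space, yields

       jarl __save_r29, r10

   or, under -mlong-calls, the movhi/movea/jarl/add/jmp sequence
   printed above.  */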
2497
2498 extern tree last_assemble_variable_decl;
2499 extern int size_directive_output;
2500
2501 /* A version of asm_output_aligned_bss() that copes with the special
2502 data areas of the v850. */
2503 void
2504 v850_output_aligned_bss (FILE * file,
2505 tree decl,
2506 const char * name,
2507 unsigned HOST_WIDE_INT size,
2508 int align)
2509 {
2510 switch (v850_get_data_area (decl))
2511 {
2512 case DATA_AREA_ZDA:
2513 switch_to_section (zbss_section);
2514 break;
2515
2516 case DATA_AREA_SDA:
2517 switch_to_section (sbss_section);
2518 break;
2519
2520 case DATA_AREA_TDA:
2521 switch_to_section (tdata_section);
2522 break;

2523 default:
2524 switch_to_section (bss_section);
2525 break;
2526 }
2527
2528 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2529 #ifdef ASM_DECLARE_OBJECT_NAME
2530 last_assemble_variable_decl = decl;
2531 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2532 #else
2533 /* Standard thing is just output label for the object. */
2534 ASM_OUTPUT_LABEL (file, name);
2535 #endif /* ASM_DECLARE_OBJECT_NAME */
2536 ASM_OUTPUT_SKIP (file, size ? size : 1);
2537 }
2538
2539 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2540 void
2541 v850_output_common (FILE * file,
2542 tree decl,
2543 const char * name,
2544 int size,
2545 int align)
2546 {
2547 if (decl == NULL_TREE)
2548 {
2549 fprintf (file, "%s", COMMON_ASM_OP);
2550 }
2551 else
2552 {
2553 switch (v850_get_data_area (decl))
2554 {
2555 case DATA_AREA_ZDA:
2556 fprintf (file, "%s", ZCOMMON_ASM_OP);
2557 break;
2558
2559 case DATA_AREA_SDA:
2560 fprintf (file, "%s", SCOMMON_ASM_OP);
2561 break;
2562
2563 case DATA_AREA_TDA:
2564 fprintf (file, "%s", TCOMMON_ASM_OP);
2565 break;
2566
2567 default:
2568 fprintf (file, "%s", COMMON_ASM_OP);
2569 break;
2570 }
2571 }
2572
2573 assemble_name (file, name);
2574 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2575 }
2576
2577 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2578 void
2579 v850_output_local (FILE * file,
2580 tree decl,
2581 const char * name,
2582 int size,
2583 int align)
2584 {
2585 fprintf (file, "%s", LOCAL_ASM_OP);
2586 assemble_name (file, name);
2587 fprintf (file, "\n");
2588
2589 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2590 }
2591
2592 /* Add data area to the given declaration if a ghs data area pragma is
2593 currently in effect (#pragma ghs startXXX/endXXX). */
2594 static void
2595 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED)
2596 {
2597 if (data_area_stack
2598 && data_area_stack->data_area
2599 && current_function_decl == NULL_TREE
2600 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2601 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2602 v850_set_data_area (decl, data_area_stack->data_area);
2603
2604 /* Initialize the default names of the v850 specific sections,
2605 if this has not been done before. */
2606
2607 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2608 {
2609 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2610 = build_string (sizeof (".sdata")-1, ".sdata");
2611
2612 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2613 = build_string (sizeof (".rosdata")-1, ".rosdata");
2614
2615 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2616 = build_string (sizeof (".tdata")-1, ".tdata");
2617
2618 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2619 = build_string (sizeof (".zdata")-1, ".zdata");
2620
2621 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2622 = build_string (sizeof (".rozdata")-1, ".rozdata");
2623 }
2624
2625 if (current_function_decl == NULL_TREE
2626 && (TREE_CODE (decl) == VAR_DECL
2627 || TREE_CODE (decl) == CONST_DECL
2628 || TREE_CODE (decl) == FUNCTION_DECL)
2629 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2630 && !DECL_SECTION_NAME (decl))
2631 {
2632 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2633 tree chosen_section;
2634
2635 if (TREE_CODE (decl) == FUNCTION_DECL)
2636 kind = GHS_SECTION_KIND_TEXT;
2637 else
2638 {
2639 /* First choose a section kind based on the data area of the decl. */
2640 switch (v850_get_data_area (decl))
2641 {
2642 default:
2643 gcc_unreachable ();
2644
2645 case DATA_AREA_SDA:
2646 kind = ((TREE_READONLY (decl))
2647 ? GHS_SECTION_KIND_ROSDATA
2648 : GHS_SECTION_KIND_SDATA);
2649 break;
2650
2651 case DATA_AREA_TDA:
2652 kind = GHS_SECTION_KIND_TDATA;
2653 break;
2654
2655 case DATA_AREA_ZDA:
2656 kind = ((TREE_READONLY (decl))
2657 ? GHS_SECTION_KIND_ROZDATA
2658 : GHS_SECTION_KIND_ZDATA);
2659 break;
2660
2661 case DATA_AREA_NORMAL: /* default data area */
2662 if (TREE_READONLY (decl))
2663 kind = GHS_SECTION_KIND_RODATA;
2664 else if (DECL_INITIAL (decl))
2665 kind = GHS_SECTION_KIND_DATA;
2666 else
2667 kind = GHS_SECTION_KIND_BSS;
2668 }
2669 }
2670
2671 /* Now, if the section kind has been explicitly renamed,
2672 then attach a section attribute. */
2673 chosen_section = GHS_current_section_names [(int) kind];
2674
2675 /* Otherwise, if this kind of section needs an explicit section
2676 attribute, then also attach one. */
2677 if (chosen_section == NULL)
2678 chosen_section = GHS_default_section_names [(int) kind];
2679
2680 if (chosen_section)
2681 {
2682 /* Only set the section name if specified by a pragma, because
2683 otherwise it will force those variables to get allocated storage
2684 in this module, rather than by the linker. */
2685 DECL_SECTION_NAME (decl) = chosen_section;
2686 }
2687 }
2688 }
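/* Illustrative example (not part of this file, and assuming the pragma
   spellings follow the startXXX/endXXX pattern mentioned above): with

       #pragma ghs startzda
       int zda_counter;        -- picks up the zda data area
       #pragma ghs endzda

   the file scope declaration between the pragmas is given the zda
   attribute by the code above.  */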
2689
2690 /* Construct a DISPOSE instruction that is the equivalent of
2691 the given RTX. We have already verified that this should
2692 be possible. */
2693
2694 char *
2695 construct_dispose_instruction (rtx op)
2696 {
2697 int count = XVECLEN (op, 0);
2698 int stack_bytes;
2699 unsigned long int mask;
2700 int i;
2701 static char buff[ 100 ]; /* XXX */
2702 int use_callt = 0;
2703
2704 if (count <= 2)
2705 {
2706 error ("bogus DISPOSE construction: %d", count);
2707 return NULL;
2708 }
2709
2710 /* Work out how many bytes to pop off the
2711 stack before retrieving registers. */
2712 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2713 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2714 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2715
2716 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2717
2718 /* Each pop will remove 4 bytes from the stack.... */
2719 stack_bytes -= (count - 2) * 4;
2720
2721 /* Make sure that the amount we are popping
2722 will fit into the DISPOSE instruction. */
2723 if (stack_bytes > 128)
2724 {
2725 error ("too much stack space to dispose of: %d", stack_bytes);
2726 return NULL;
2727 }
2728
2729 /* Now compute the bit mask of registers to pop. */
2730 mask = 0;
2731
2732 for (i = 2; i < count; i++)
2733 {
2734 rtx vector_element = XVECEXP (op, 0, i);
2735
2736 gcc_assert (GET_CODE (vector_element) == SET);
2737 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2738 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2739 SImode));
2740
2741 if (REGNO (SET_DEST (vector_element)) == 2)
2742 use_callt = 1;
2743 else
2744 mask |= 1 << REGNO (SET_DEST (vector_element));
2745 }
2746
2747 if (! TARGET_DISABLE_CALLT
2748 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
2749 {
2750 if (use_callt)
2751 {
2752 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2753 return buff;
2754 }
2755 else
2756 {
2757 for (i = 20; i < 32; i++)
2758 if (mask & (1 << i))
2759 break;
2760
2761 if (i == 31)
2762 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2763 else
2764 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
2765 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
2766 }
2767 }
2768 else
2769 {
2770 static char regs [100]; /* XXX */
2771 int done_one;
2772
2773 /* Generate the DISPOSE instruction. Note we could just issue the
2774 bit mask as a number as the assembler can cope with this, but for
2775 the sake of our readers we turn it into a textual description. */
2776 regs[0] = 0;
2777 done_one = 0;
2778
2779 for (i = 20; i < 32; i++)
2780 {
2781 if (mask & (1 << i))
2782 {
2783 int first;
2784
2785 if (done_one)
2786 strcat (regs, ", ");
2787 else
2788 done_one = 1;
2789
2790 first = i;
2791 strcat (regs, reg_names[ first ]);
2792
2793 for (i++; i < 32; i++)
2794 if ((mask & (1 << i)) == 0)
2795 break;
2796
2797 if (i > first + 1)
2798 {
2799 strcat (regs, " - ");
2800 strcat (regs, reg_names[ i - 1 ] );
2801 }
2802 }
2803 }
2804
2805 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2806 }
2807
2808 return buff;
2809 }
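/* Illustrative example (not part of this file): when the CALLT forms
   cannot be used, say because r25-r29 are popped together with 32
   extra bytes of stack, the textual form built above is

       dispose 8 {r25 - r29}, r31

   i.e. the extra byte count divided by four, the register range, and
   the register to return through.  */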
2810
2811 /* Construct a PREPARE instruction that is the equivalent of
2812 the given RTL. We have already verified that this should
2813 be possible. */
2814
2815 char *
2816 construct_prepare_instruction (rtx op)
2817 {
2818 int count = XVECLEN (op, 0);
2819 int stack_bytes;
2820 unsigned long int mask;
2821 int i;
2822 static char buff[ 100 ]; /* XXX */
2823 int use_callt = 0;
2824
2825 if (count <= 1)
2826 {
2827 error ("bogus PREPEARE construction: %d", count);
2828 return NULL;
2829 }
2830
2831 /* Work out how many bytes to push onto
2832 the stack after storing the registers. */
2833 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2834 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2835 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2836
2837 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2838
2839 /* Each push will occupy 4 bytes on the stack. */
2840 stack_bytes += (count - 1) * 4;
2841
2842 /* Make sure that the amount we are pushing
2843 will fit into the PREPARE instruction. */
2844 if (stack_bytes < -128)
2845 {
2846 error ("too much stack space to prepare: %d", stack_bytes);
2847 return NULL;
2848 }
2849
2850 /* Now compute the bit mask of registers to push. */
2851 mask = 0;
2852 for (i = 1; i < count; i++)
2853 {
2854 rtx vector_element = XVECEXP (op, 0, i);
2855
2856 gcc_assert (GET_CODE (vector_element) == SET);
2857 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2858 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2859 SImode));
2860
2861 if (REGNO (SET_SRC (vector_element)) == 2)
2862 use_callt = 1;
2863 else
2864 mask |= 1 << REGNO (SET_SRC (vector_element));
2865 }
2866
2867 if ((! TARGET_DISABLE_CALLT)
2868 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
2869 {
2870 if (use_callt)
2871 {
2872 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2873 return buff;
2874 }
2875
2876 for (i = 20; i < 32; i++)
2877 if (mask & (1 << i))
2878 break;
2879
2880 if (i == 31)
2881 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2882 else
2883 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
2884 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
2885 }
2886 else
2887 {
2888 static char regs [100]; /* XXX */
2889 int done_one;
2890
2891
2892 /* Generate the PREPARE instruction. Note we could just issue the
2893 bit mask as a number as the assembler can cope with this, but for
2894 the sake of our readers we turn it into a textual description. */
2895 regs[0] = 0;
2896 done_one = 0;
2897
2898 for (i = 20; i < 32; i++)
2899 {
2900 if (mask & (1 << i))
2901 {
2902 int first;
2903
2904 if (done_one)
2905 strcat (regs, ", ");
2906 else
2907 done_one = 1;
2908
2909 first = i;
2910 strcat (regs, reg_names[ first ]);
2911
2912 for (i++; i < 32; i++)
2913 if ((mask & (1 << i)) == 0)
2914 break;
2915
2916 if (i > first + 1)
2917 {
2918 strcat (regs, " - ");
2919 strcat (regs, reg_names[ i - 1 ] );
2920 }
2921 }
2922 }
2923
2924 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2925 }
2926
2927 return buff;
2928 }
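/* Illustrative example (not part of this file): the mirror image of
   the DISPOSE case above; saving r25-r29 and acquiring 32 extra bytes
   of stack produces

       prepare {r25 - r29}, 8

   where the final operand is again the extra space in words.  */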
2929
2930 /* Return an RTX indicating where the return address to the
2931 calling function can be found. */
2932
2933 rtx
2934 v850_return_addr (int count)
2935 {
2936 if (count != 0)
2937 return const0_rtx;
2938
2939 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2940 }
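/* Illustrative example (not part of this file, assuming RETURN_ADDR_RTX
   in v850.h forwards to this function): user code such as

       void *ra = __builtin_return_address (0);

   reads the incoming value of the link pointer (r31), while any
   non-zero argument simply evaluates to 0 on this target.  */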
2941
2942 /* Implement TARGET_ASM_INIT_SECTIONS. */
2943
2944 static void
2945 v850_asm_init_sections (void)
2946 {
2947 rosdata_section
2948 = get_unnamed_section (0, output_section_asm_op,
2949 "\t.section .rosdata,\"a\"");
2950
2951 rozdata_section
2952 = get_unnamed_section (0, output_section_asm_op,
2953 "\t.section .rozdata,\"a\"");
2954
2955 tdata_section
2956 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2957 "\t.section .tdata,\"aw\"");
2958
2959 zdata_section
2960 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2961 "\t.section .zdata,\"aw\"");
2962
2963 zbss_section
2964 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2965 output_section_asm_op,
2966 "\t.section .zbss,\"aw\"");
2967 }
2968
2969 static section *
2970 v850_select_section (tree exp,
2971 int reloc ATTRIBUTE_UNUSED,
2972 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2973 {
2974 if (TREE_CODE (exp) == VAR_DECL)
2975 {
2976 int is_const;
2977 if (!TREE_READONLY (exp)
2978 || TREE_SIDE_EFFECTS (exp)
2979 || !DECL_INITIAL (exp)
2980 || (DECL_INITIAL (exp) != error_mark_node
2981 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2982 is_const = FALSE;
2983 else
2984 is_const = TRUE;
2985
2986 switch (v850_get_data_area (exp))
2987 {
2988 case DATA_AREA_ZDA:
2989 return is_const ? rozdata_section : zdata_section;
2990
2991 case DATA_AREA_TDA:
2992 return tdata_section;
2993
2994 case DATA_AREA_SDA:
2995 return is_const ? rosdata_section : sdata_section;
2996
2997 default:
2998 return is_const ? readonly_data_section : data_section;
2999 }
3000 }
3001 return readonly_data_section;
3002 }
3003
3004 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3005
3006 static bool
3007 v850_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3008 {
3009 /* Return values > 8 bytes in length in memory. */
3010 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3011 }
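/* Illustrative example (not part of this file):

       struct s8  { int a, b; };       -- 8 bytes, returned in registers
       struct s12 { int a, b, c; };    -- 12 bytes, forced into memory

   Anything wider than 8 bytes, or anything with BLKmode, is returned
   in memory by the test above.  */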
3012
3013 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3014
3015 static void
3016 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3017 enum machine_mode mode ATTRIBUTE_UNUSED,
3018 tree type ATTRIBUTE_UNUSED,
3019 int *pretend_arg_size ATTRIBUTE_UNUSED,
3020 int second_time ATTRIBUTE_UNUSED)
3021 {
3022 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3023 }
3024
3025 #include "gt-v850.h"
3026