1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "df.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "optabs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "fold-const.h"
37 #include "varasm.h"
38 #include "calls.h"
39 #include "stor-layout.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "explow.h"
43 #include "expr.h"
44 #include "reload.h"
45 #include "langhooks.h"
46 #include "cfgrtl.h"
47 #include "gimplify.h"
48 #include "opts.h"
49 #include "dumpfile.h"
50 #include "builtins.h"
51 #include "rtl-iter.h"
52
53 /* This file should be included last. */
54 #include "target-def.h"
55
56 /* Structure of this file:
57
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
60 + Reload Support
61 + Costs
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
64 + Operand Printing
65 + Function args in registers
66 + Handle pipeline hazards
67 + Handle attributes
68 + Trampolines
69 + Machine-dependent Reorg
70 + Builtins. */
71
72 /* Symbol encodings:
73
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
75
76 b - based
77 t - tiny
78 n - near
79 f - far
80 i - io, near
81 I - io, far
82 c - cb (control bus) */
83
/* Machine-specific per-function data, allocated by
   mep_init_machine_status and reached through cfun->machine.  */

struct GTY(()) machine_function
{
  /* Cached frame-pointer decision for this function.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;
  /* NOTE(review): the two "filler" fields look like alignment padding
     for the register-save area and the frame -- confirm against the
     frame-layout code.  */
  int regsave_filler;
  int frame_filler;
  int frame_locked;

  /* Records __builtin_return address. */
  rtx eh_stack_adjust;

  /* Size of the register-save area, plus per-hard-register slot
     offsets and saved flags.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
122
123 #define MEP_CONTROL_REG(x) \
124 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125
126 static GTY(()) section * based_section;
127 static GTY(()) section * tinybss_section;
128 static GTY(()) section * far_section;
129 static GTY(()) section * farbss_section;
130 static GTY(()) section * frodata_section;
131 static GTY(()) section * srodata_section;
132
133 static GTY(()) section * vtext_section;
134 static GTY(()) section * vftext_section;
135 static GTY(()) section * ftext_section;
136
137 static void mep_set_leaf_registers (int);
138 static bool symbol_p (rtx);
139 static bool symbolref_p (rtx);
140 static void encode_pattern_1 (rtx);
141 static void encode_pattern (rtx);
142 static bool const_in_range (rtx, int, int);
143 static void mep_rewrite_mult (rtx_insn *, rtx);
144 static void mep_rewrite_mulsi3 (rtx_insn *, rtx, rtx, rtx);
145 static void mep_rewrite_maddsi3 (rtx_insn *, rtx, rtx, rtx, rtx);
146 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx_insn *, bool);
147 static bool move_needs_splitting (rtx, rtx, machine_mode);
148 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149 static bool mep_nongeneral_reg (rtx);
150 static bool mep_general_copro_reg (rtx);
151 static bool mep_nonregister (rtx);
152 static struct machine_function* mep_init_machine_status (void);
153 static rtx mep_tp_rtx (void);
154 static rtx mep_gp_rtx (void);
155 static bool mep_interrupt_p (void);
156 static bool mep_disinterrupt_p (void);
157 static bool mep_reg_set_p (rtx, rtx);
158 static bool mep_reg_set_in_function (int);
159 static bool mep_interrupt_saved_reg (int);
160 static bool mep_call_saves_register (int);
161 static rtx_insn *F (rtx_insn *);
162 static void add_constant (int, int, int, int);
163 static rtx_insn *maybe_dead_move (rtx, rtx, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT);
166 static bool mep_function_ok_for_sibcall (tree, tree);
167 static int unique_bit_in (HOST_WIDE_INT);
168 static int bit_size_for_clip (HOST_WIDE_INT);
169 static int bytesize (const_tree, machine_mode);
170 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree);
177 static bool mep_can_inline_p (tree, tree);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree, bool);
180 static int mep_attrlist_to_encoding (tree, tree);
181 static void mep_insert_attributes (tree, tree *);
182 static void mep_encode_section_info (tree, rtx, int);
183 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184 static void mep_unique_section (tree, int);
185 static unsigned int mep_section_type_flags (tree, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree);
187 static bool mep_mentioned_p (rtx, rtx, int);
188 static void mep_reorg_regmove (rtx_insn *);
189 static rtx_insn *mep_insert_repeat_label_last (rtx_insn *, rtx_code_label *,
190 bool, bool);
191 static void mep_reorg_repeat (rtx_insn *);
192 static bool mep_invertable_branch_p (rtx_insn *);
193 static void mep_invert_branch (rtx_insn *, rtx_insn *);
194 static void mep_reorg_erepeat (rtx_insn *);
195 static void mep_jmp_return_reorg (rtx_insn *);
196 static void mep_reorg_addcombine (rtx_insn *);
197 static void mep_reorg (void);
198 static void mep_init_intrinsics (void);
199 static void mep_init_builtins (void);
200 static void mep_intrinsic_unavailable (int);
201 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
202 static bool mep_get_move_insn (int, const struct cgen_insn **);
203 static rtx mep_convert_arg (machine_mode, rtx);
204 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
205 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
206 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
207 static rtx mep_expand_builtin (tree, rtx, rtx, machine_mode, int);
208 static int mep_adjust_cost (rtx_insn *, rtx, rtx_insn *, int);
209 static int mep_issue_rate (void);
210 static rtx_insn *mep_find_ready_insn (rtx_insn **, int, enum attr_slot, int);
211 static void mep_move_ready_insn (rtx_insn **, int, rtx_insn *);
212 static int mep_sched_reorder (FILE *, int, rtx_insn **, int *, int);
213 static rtx_insn *mep_make_bundle (rtx, rtx_insn *);
214 static void mep_bundle_insns (rtx_insn *);
215 static bool mep_rtx_cost (rtx, machine_mode, int, int, int *, bool);
216 static int mep_address_cost (rtx, machine_mode, addr_space_t, bool);
217 static void mep_setup_incoming_varargs (cumulative_args_t, machine_mode,
218 tree, int *, int);
219 static bool mep_pass_by_reference (cumulative_args_t cum, machine_mode,
220 const_tree, bool);
221 static rtx mep_function_arg (cumulative_args_t, machine_mode,
222 const_tree, bool);
223 static void mep_function_arg_advance (cumulative_args_t, machine_mode,
224 const_tree, bool);
225 static bool mep_vector_mode_supported_p (machine_mode);
226 static rtx mep_allocate_initial_value (rtx);
227 static void mep_asm_init_sections (void);
228 static int mep_comp_type_attributes (const_tree, const_tree);
229 static bool mep_narrow_volatile_bitfield (void);
230 static rtx mep_expand_builtin_saveregs (void);
231 static tree mep_build_builtin_va_list (void);
232 static void mep_expand_va_start (tree, rtx);
233 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
234 static bool mep_can_eliminate (const int, const int);
235 static void mep_conditional_register_usage (void);
236 static void mep_trampoline_init (rtx, tree, rtx);
237
238 #define WANT_GCC_DEFINITIONS
239 #include "mep-intrin.h"
240 #undef WANT_GCC_DEFINITIONS
241
242
243 /* Command Line Option Support. */
244
/* Per-register leaf flags; all entries are kept identical by
   mep_set_leaf_registers, so entry 0 reflects the current state.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
269
270
271 static void
mep_set_leaf_registers(int enable)272 mep_set_leaf_registers (int enable)
273 {
274 int i;
275
276 if (mep_leaf_registers[0] != enable)
277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
278 mep_leaf_registers[i] = enable;
279 }
280
281 static void
mep_conditional_register_usage(void)282 mep_conditional_register_usage (void)
283 {
284 int i;
285
286 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
287 {
288 fixed_regs[HI_REGNO] = 1;
289 fixed_regs[LO_REGNO] = 1;
290 call_used_regs[HI_REGNO] = 1;
291 call_used_regs[LO_REGNO] = 1;
292 }
293
294 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
295 global_regs[i] = 1;
296 }
297
/* Implement TARGET_OPTION_OVERRIDE: process deferred options, check
   option combinations for consistency, and derive dependent settings
   (tiny-section cutoff, machine-status hook, intrinsics tables).  */

static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  /* Handle options whose processing was deferred until now.  */
  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
        switch (opt->opt_index)
          {
          case OPT_mivc2:
            /* -mivc2: open up the 32 IVC2 coprocessor registers
               (hard regs 48..79) for allocation, caller-saved except
               for regs 54 and 55, and give the control registers
               their IVC2 names.  */
            for (j = 0; j < 32; j++)
              fixed_regs[j + 48] = 0;
            for (j = 0; j < 32; j++)
              call_used_regs[j + 48] = 1;
            for (j = 6; j < 8; j++)
              call_used_regs[j + 48] = 0;

#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
            RN (0, "$csar0");
            RN (1, "$cc");
            RN (4, "$cofr0");
            RN (5, "$cofr1");
            RN (6, "$cofa0");
            RN (7, "$cofa1");
            RN (15, "$csar1");

            RN (16, "$acc0_0");
            RN (17, "$acc0_1");
            RN (18, "$acc0_2");
            RN (19, "$acc0_3");
            RN (20, "$acc0_4");
            RN (21, "$acc0_5");
            RN (22, "$acc0_6");
            RN (23, "$acc0_7");

            RN (24, "$acc1_0");
            RN (25, "$acc1_1");
            RN (26, "$acc1_2");
            RN (27, "$acc1_3");
            RN (28, "$acc1_4");
            RN (29, "$acc1_5");
            RN (30, "$acc1_6");
            RN (31, "$acc1_7");
#undef RN
            break;

          default:
            gcc_unreachable ();
          }
      }

  /* Diagnose unsupported or mutually exclusive options.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
          && strcmp (mep_const_section, "near") != 0
          && strcmp (mep_const_section, "far") != 0)
        error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Derive the tiny-section cutoff from the memory model.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
391
392 /* Pattern Support - constraints, predicates, expanders. */
393
394 /* MEP has very few instructions that can refer to the span of
395 addresses used by symbols, so it's common to check for them. */
396
397 static bool
symbol_p(rtx x)398 symbol_p (rtx x)
399 {
400 int c = GET_CODE (x);
401
402 return (c == CONST_INT
403 || c == CONST
404 || c == SYMBOL_REF);
405 }
406
407 static bool
symbolref_p(rtx x)408 symbolref_p (rtx x)
409 {
410 int c;
411
412 if (GET_CODE (x) != MEM)
413 return false;
414
415 c = GET_CODE (XEXP (x, 0));
416 return (c == CONST_INT
417 || c == CONST
418 || c == SYMBOL_REF);
419 }
420
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */

/* True if hard register R may serve as an address register: any core
   general register, plus (when not STRICT) the argument pointer and
   any pseudo.  */
#define GEN_REG(R, STRICT)				\
  (GR_REGNO_P (R)					\
   || (!STRICT						\
       && ((R) == ARG_POINTER_REGNUM			\
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Shape string of the rtx last passed to encode_pattern (one char per
   sub-rtx; see encode_pattern_1 for the codes) and the corresponding
   sub-rtxes, indexed by position in the string.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* Compare the current encoded pattern against a literal shape.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
432
/* Append a character code for X, and recursively for its operands, to
   the global `pattern' buffer, recording each sub-rtx in `patternr'.
   Overflow of the fixed-size buffer is marked with '?'.  */

static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; flag truncation with '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address is encoded just like a CONST's
	 operand.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Encode the unspec number as a digit, then its operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
499
/* Encode the shape of X into the global `pattern' buffer (NUL
   terminated) and fill `patternr' with the matching sub-rtxes, for
   use with RTX_IS.  */

static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
507
/* Return the section-tag character encoded in X's symbol name (see
   the "Symbol encodings" table above), or 0 if there is none.  The io
   encodings 'i'/'I' are folded into 'n' (near) and 'f' (far).  */

int
mep_section_tag (rtx x)
{
  const char *name;

  /* Strip wrappers until the underlying SYMBOL_REF, if any.  */
  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  /* Encoded names have the form "@<tag>.<name>".  */
  if (name[0] == '@' && name[2] == '.')
    {
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
549
/* Return the smallest register class containing hard register REGNO
   (implements REGNO_REG_CLASS).  */

int
mep_regno_reg_class (int regno)
{
  /* Dedicated single-register classes first.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0:			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are addressable $tp-relative.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* If no subclass of class I contains REGNO, I itself is
		 the smallest containing user class.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers remain; they belong to no class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
599
600 static bool
const_in_range(rtx x,int minv,int maxv)601 const_in_range (rtx x, int minv, int maxv)
602 {
603 return (GET_CODE (x) == CONST_INT
604 && INTVAL (x) >= minv
605 && INTVAL (x) <= maxv);
606 }
607
608 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
609 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
610 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
611 at the end of the insn stream. */
612
613 rtx
mep_mulr_source(rtx_insn * insn,rtx dest,rtx src1,rtx src2)614 mep_mulr_source (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
615 {
616 if (rtx_equal_p (dest, src1))
617 return src2;
618 else if (rtx_equal_p (dest, src2))
619 return src1;
620 else
621 {
622 if (insn == 0)
623 emit_insn (gen_movsi (copy_rtx (dest), src1));
624 else
625 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
626 return src2;
627 }
628 }
629
630 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
631 Change the last element of PATTERN from (clobber (scratch:SI))
632 to (clobber (reg:SI HI_REGNO)). */
633
634 static void
mep_rewrite_mult(rtx_insn * insn,rtx pattern)635 mep_rewrite_mult (rtx_insn *insn, rtx pattern)
636 {
637 rtx hi_clobber;
638
639 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
640 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
641 PATTERN (insn) = pattern;
642 INSN_CODE (insn) = -1;
643 }
644
645 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
646 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
647 store the result in DEST if nonnull. */
648
649 static void
mep_rewrite_mulsi3(rtx_insn * insn,rtx dest,rtx src1,rtx src2)650 mep_rewrite_mulsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2)
651 {
652 rtx lo, pattern;
653
654 lo = gen_rtx_REG (SImode, LO_REGNO);
655 if (dest)
656 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
657 mep_mulr_source (insn, dest, src1, src2));
658 else
659 pattern = gen_mulsi3_lo (lo, src1, src2);
660 mep_rewrite_mult (insn, pattern);
661 }
662
663 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
664 SRC3 into $lo, then use either madd or maddr. The move into $lo will
665 be deleted by a peephole2 if SRC3 is already in $lo. */
666
667 static void
mep_rewrite_maddsi3(rtx_insn * insn,rtx dest,rtx src1,rtx src2,rtx src3)668 mep_rewrite_maddsi3 (rtx_insn *insn, rtx dest, rtx src1, rtx src2, rtx src3)
669 {
670 rtx lo, pattern;
671
672 lo = gen_rtx_REG (SImode, LO_REGNO);
673 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
674 if (dest)
675 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
676 mep_mulr_source (insn, dest, src1, src2),
677 copy_rtx (lo));
678 else
679 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
680 mep_rewrite_mult (insn, pattern);
681 }
682
683 /* Return true if $lo has the same value as integer register GPR when
684 instruction INSN is reached. If necessary, rewrite the instruction
685 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
686 rtx for (reg:SI LO_REGNO).
687
688 This function is intended to be used by the peephole2 pass. Since
689 that pass goes from the end of a basic block to the beginning, and
690 propagates liveness information on the way, there is no need to
691 update register notes here.
692
693 If GPR_DEAD_P is true on entry, and this function returns true,
694 then the caller will replace _every_ use of GPR in and after INSN
695 with LO. This means that if the instruction that sets $lo is a
696 mulr- or maddr-type instruction, we can rewrite it to use mul or
697 madd instead. In combination with the copy progagation pass,
698 this allows us to replace sequences like:
699
700 mov GPR,R1
701 mulr GPR,R2
702
703 with:
704
705 mul R1,R2
706
707 if GPR is no longer used. */
708
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN to the start of the basic block for the
     instruction that last set GPR.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR was set by this multiply; rewrite it to place
		   the result in $lo as well (dropping the GPR result
		   entirely if GPR is dead).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* Same treatment for multiply-accumulate.  */
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already write $lo; GPR matches iff it is the
	       GPR destination operand.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       invalidates the reuse.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* An intervening use of GPR means not every use can be
	       retargeted at $lo.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
762
763 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
764
765 bool
mep_reuse_lo_p(rtx lo,rtx gpr,rtx_insn * insn,bool gpr_dead_p)766 mep_reuse_lo_p (rtx lo, rtx gpr, rtx_insn *insn, bool gpr_dead_p)
767 {
768 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
769 extract_insn (insn);
770 return result;
771 }
772
773 /* Return true if SET can be turned into a post-modify load or store
774 that adds OFFSET to GPR. In other words, return true if SET can be
775 changed into:
776
777 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
778
779 It's OK to change SET to an equivalent operation in order to
780 make it match. */
781
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load to the MEM beneath.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     dereference of GPR itself.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
843
844 /* Return the effect of frame-related instruction INSN. */
845
846 static rtx
mep_frame_expr(rtx_insn * insn)847 mep_frame_expr (rtx_insn *insn)
848 {
849 rtx note, expr;
850
851 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
852 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
853 RTX_FRAME_RELATED_P (expr) = 1;
854 return expr;
855 }
856
857 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
858 new pattern in INSN1; INSN2 will be deleted by the caller. */
859
860 static void
mep_make_parallel(rtx_insn * insn1,rtx_insn * insn2)861 mep_make_parallel (rtx_insn *insn1, rtx_insn *insn2)
862 {
863 rtx expr;
864
865 if (RTX_FRAME_RELATED_P (insn2))
866 {
867 expr = mep_frame_expr (insn2);
868 if (RTX_FRAME_RELATED_P (insn1))
869 expr = gen_rtx_SEQUENCE (VOIDmode,
870 gen_rtvec (2, mep_frame_expr (insn1), expr));
871 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
872 RTX_FRAME_RELATED_P (insn1) = 1;
873 }
874
875 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
876 gen_rtvec (2, PATTERN (insn1),
877 PATTERN (insn2)));
878 INSN_CODE (insn1) = -1;
879 }
880
881 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
882 the basic block to see if any previous load or store instruction can
883 be persuaded to do SET_INSN as a side-effect. Return true if so. */
884
static bool
mep_use_post_modify_p_1 (rtx_insn *set_insn, rtx reg, rtx offset)
{
  rtx_insn *insn;

  /* Walk backwards from SET_INSN to the start of the basic block.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Found a load/store through REG; fold SET_INSN into it
		 as a post-modify side effect.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any other write or use of REG, or a volatile insn, between
	     the memory access and SET_INSN blocks the transformation.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
911
912 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
913
914 bool
mep_use_post_modify_p(rtx_insn * insn,rtx reg,rtx offset)915 mep_use_post_modify_p (rtx_insn *insn, rtx reg, rtx offset)
916 {
917 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
918 extract_insn (insn);
919 return result;
920 }
921
922 bool
mep_allow_clip(rtx ux,rtx lx,int s)923 mep_allow_clip (rtx ux, rtx lx, int s)
924 {
925 HOST_WIDE_INT u = INTVAL (ux);
926 HOST_WIDE_INT l = INTVAL (lx);
927 int i;
928
929 if (!TARGET_OPT_CLIP)
930 return false;
931
932 if (s)
933 {
934 for (i = 0; i < 30; i ++)
935 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
936 && (l == - ((HOST_WIDE_INT) 1 << i)))
937 return true;
938 }
939 else
940 {
941 if (l != 0)
942 return false;
943
944 for (i = 0; i < 30; i ++)
945 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
946 return true;
947 }
948 return false;
949 }
950
951 bool
mep_bit_position_p(rtx x,bool looking_for)952 mep_bit_position_p (rtx x, bool looking_for)
953 {
954 if (GET_CODE (x) != CONST_INT)
955 return false;
956 switch ((int) INTVAL(x) & 0xff)
957 {
958 case 0x01: case 0x02: case 0x04: case 0x08:
959 case 0x10: case 0x20: case 0x40: case 0x80:
960 return looking_for;
961 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
962 case 0xef: case 0xdf: case 0xbf: case 0x7f:
963 return !looking_for;
964 }
965 return false;
966 }
967
/* Return true if moving SRC into DEST must be split into multiple
   instructions: far-section symbols, symbol+offset sums whose offset
   is outside the directly encodable range, or destinations above the
   low eight core registers.  */

static bool
move_needs_splitting (rtx dest, rtx src,
		      machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Dig down to the symbolic core of SRC, if there is one.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
996
997 bool
mep_split_mov(rtx * operands,int symbolic)998 mep_split_mov (rtx *operands, int symbolic)
999 {
1000 if (symbolic)
1001 {
1002 if (move_needs_splitting (operands[0], operands[1], SImode))
1003 return true;
1004 return false;
1005 }
1006
1007 if (GET_CODE (operands[1]) != CONST_INT)
1008 return false;
1009
1010 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1011 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1012 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1013 return false;
1014
1015 if (((!reload_completed && !reload_in_progress)
1016 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1017 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1018 return false;
1019
1020 return true;
1021 }
1022
1023 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1024 it to one specific value. So the insn chosen depends on whether
1025 the source and destination modes match. */
1026
1027 bool
mep_vliw_mode_match(rtx tgt)1028 mep_vliw_mode_match (rtx tgt)
1029 {
1030 bool src_vliw = mep_vliw_function_p (cfun->decl);
1031 bool tgt_vliw = INTVAL (tgt);
1032
1033 return src_vliw == tgt_vliw;
1034 }
1035
1036 /* Like the above, but also test for near/far mismatches. */
1037
1038 bool
mep_vliw_jmp_match(rtx tgt)1039 mep_vliw_jmp_match (rtx tgt)
1040 {
1041 bool src_vliw = mep_vliw_function_p (cfun->decl);
1042 bool tgt_vliw = INTVAL (tgt);
1043
1044 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1045 return false;
1046
1047 return src_vliw == tgt_vliw;
1048 }
1049
1050 bool
mep_multi_slot(rtx_insn * x)1051 mep_multi_slot (rtx_insn *x)
1052 {
1053 return get_attr_slot (x) == SLOT_MULTI;
1054 }
1055
1056 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1057
1058 static bool
mep_legitimate_constant_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x)1059 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1060 {
1061 /* We can't convert symbol values to gp- or tp-rel values after
1062 reload, as reload might have used $gp or $tp for other
1063 purposes. */
1064 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1065 {
1066 char e = mep_section_tag (x);
1067 return (e != 't' && e != 'b');
1068 }
1069 return 1;
1070 }
1071
/* Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.

   Validate X as an address for a MODE-sized access; STRICT selects
   strict vs. non-strict register checking (see GEN_REG).  */

bool
mep_legitimate_address (machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* (lo_sum reg const): the low-part relocation form.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Register plus a 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Register plus a relocation unspec (e.g. %tprel/%gprel), possibly
     with an additional constant offset.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far symbols need their address computed in a register first.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses are validated with VOIDmode; a bare symbol is OK
     as a call target.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Absolute constant addresses for word-sized accesses, excluding
     tiny/based symbols.  Literal addresses must be small, non-negative
     and word-aligned.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1182
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Return 1 if a reload was
   pushed to fix *X, 0 to let the generic machinery handle it.  */

int
mep_legitimize_reload_address (rtx *x, machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      /* Tiny/based symbols stay as sym+const; they are rewritten as
	 $gp/$tp-relative elsewhere.  */
      char e = mep_section_tag (XEXP (*x, 0));

      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1224
/* Return the length in bytes (2 or 4) of the address encoding used by
   core load/store INSN, where operand OPN of its single SET is the
   memory reference and the other operand is the register moved.  */

int
mep_core_address_length (rtx_insn *insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);
  rtx addr = XEXP (mem, 0);

  /* Plain register-indirect always has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* $sp-relative word accesses with a 7-bit, 4-aligned
	     displacement have a 16-bit encoding.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short $tp-relative forms only encode registers 0-7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A %tprel relocation fits the short form.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* So does a small, naturally aligned literal offset.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1271
1272 int
mep_cop_address_length(rtx_insn * insn,int opn)1273 mep_cop_address_length (rtx_insn *insn, int opn)
1274 {
1275 rtx set = single_set (insn);
1276 rtx mem = XEXP (set, opn);
1277 rtx addr = XEXP (mem, 0);
1278
1279 if (GET_CODE (mem) != MEM)
1280 return 2;
1281 if (register_operand (addr, Pmode))
1282 return 2;
1283 if (GET_CODE (addr) == POST_INC)
1284 return 2;
1285
1286 return 4;
1287 }
1288
#define DEBUG_EXPAND_MOV 0
/* Expand a MODE-sized move OPERANDS[0] <- OPERANDS[1].  Return true
   if the move was fully emitted here, false if the caller should fall
   through to the standard move pattern.  */
bool
mep_expand_mov (rtx *operands, machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Mem-to-mem moves need an intermediate register; only create one
     before reload (reload must not make new pseudos).  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* During/after reload, only use $gp/$tp-relative addressing if
	 the base register still holds its incoming value.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Rewrite a based ('b', $tp-relative) or tiny ('t',
	 $gp-relative) symbolic source as base-reg + UNSPEC.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus sym int)) — keep the offset outside
		     the relocation unspec.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite based/tiny MEM addresses in either operand
	 as base-reg + UNSPEC, then continue with the adjusted
	 operands.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers can only be moved to and from general
     registers, so force the other side into a register first.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols, or of non-word size, need the destination
     address computed into a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Before reload, based/tiny sources were already handled above, so
     drop their tag here.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Materialize a symbolic value with a movh/add (top/bottom) pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Likewise for a load through such a symbol's address.  During
     reload, reuse the destination as the scratch.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1481
1482 /* Cases where the pattern can't be made to use at all. */
1483
1484 bool
mep_mov_ok(rtx * operands,machine_mode mode ATTRIBUTE_UNUSED)1485 mep_mov_ok (rtx *operands, machine_mode mode ATTRIBUTE_UNUSED)
1486 {
1487 int i;
1488
1489 #define DEBUG_MOV_OK 0
1490 #if DEBUG_MOV_OK
1491 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1492 mep_section_tag (operands[1]));
1493 debug_rtx (operands[0]);
1494 debug_rtx (operands[1]);
1495 #endif
1496
1497 /* We want the movh patterns to get these. */
1498 if (GET_CODE (operands[1]) == HIGH)
1499 return false;
1500
1501 /* We can't store a register to a far variable without using a
1502 scratch register to hold the address. Using far variables should
1503 be split by mep_emit_mov anyway. */
1504 if (mep_section_tag (operands[0]) == 'f'
1505 || mep_section_tag (operands[1]) == 'f')
1506 {
1507 #if DEBUG_MOV_OK
1508 fprintf (stderr, " - no, f\n");
1509 #endif
1510 return false;
1511 }
1512 i = mep_section_tag (operands[1]);
1513 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1514 /* These are supposed to be generated with adds of the appropriate
1515 register. During and after reload, however, we allow them to
1516 be accessed as normal symbols because adding a dependency on
1517 the base register now might cause problems. */
1518 {
1519 #if DEBUG_MOV_OK
1520 fprintf (stderr, " - no, bt\n");
1521 #endif
1522 return false;
1523 }
1524
1525 /* The only moves we can allow involve at least one general
1526 register, so require it. */
1527 for (i = 0; i < 2; i ++)
1528 {
1529 /* Allow subregs too, before reload. */
1530 rtx x = operands[i];
1531
1532 if (GET_CODE (x) == SUBREG)
1533 x = XEXP (x, 0);
1534 if (GET_CODE (x) == REG
1535 && ! MEP_CONTROL_REG (x))
1536 {
1537 #if DEBUG_MOV_OK
1538 fprintf (stderr, " - ok\n");
1539 #endif
1540 return true;
1541 }
1542 }
1543 #if DEBUG_MOV_OK
1544 fprintf (stderr, " - no, no gen reg\n");
1545 #endif
1546 return false;
1547 }
1548
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the double-word move OPERANDS[0] <- OPERANDS[1] into two
   single-word moves: high parts go into OPERANDS[2..3], low parts
   into OPERANDS[4..5], swapped when needed so the first emitted move
   cannot clobber an input of the second.  */
void
mep_split_wide_move (rtx *operands, machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    /* A 64-bit coprocessor register holds the whole value;
	       its upper half is addressed via a zero_extract of the
	       DImode register.  */
	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* Otherwise the value lives in a register pair; which
		   register is the high word depends on endianness.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1630
1631 /* Emit a setcc instruction in its entirity. */
1632
1633 static bool
mep_expand_setcc_1(enum rtx_code code,rtx dest,rtx op1,rtx op2)1634 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1635 {
1636 rtx tmp;
1637
1638 switch (code)
1639 {
1640 case GT:
1641 case GTU:
1642 tmp = op1, op1 = op2, op2 = tmp;
1643 code = swap_condition (code);
1644 /* FALLTHRU */
1645
1646 case LT:
1647 case LTU:
1648 op1 = force_reg (SImode, op1);
1649 emit_insn (gen_rtx_SET (dest, gen_rtx_fmt_ee (code, SImode, op1, op2)));
1650 return true;
1651
1652 case EQ:
1653 if (op2 != const0_rtx)
1654 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1655 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1656 return true;
1657
1658 case NE:
1659 /* Branchful sequence:
1660 mov dest, 0 16-bit
1661 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1662 mov dest, 1 16-bit
1663
1664 Branchless sequence:
1665 add3 tmp, op1, -op2 32-bit (or mov + sub)
1666 sltu3 tmp, tmp, 1 16-bit
1667 xor3 dest, tmp, 1 32-bit
1668 */
1669 if (optimize_size && op2 != const0_rtx)
1670 return false;
1671
1672 if (op2 != const0_rtx)
1673 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1674
1675 op2 = gen_reg_rtx (SImode);
1676 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1677
1678 emit_insn (gen_rtx_SET (dest, gen_rtx_XOR (SImode, op2, const1_rtx)));
1679 return true;
1680
1681 case LE:
1682 if (GET_CODE (op2) != CONST_INT
1683 || INTVAL (op2) == 0x7ffffff)
1684 return false;
1685 op2 = GEN_INT (INTVAL (op2) + 1);
1686 return mep_expand_setcc_1 (LT, dest, op1, op2);
1687
1688 case LEU:
1689 if (GET_CODE (op2) != CONST_INT
1690 || INTVAL (op2) == -1)
1691 return false;
1692 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1693 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1694
1695 case GE:
1696 if (GET_CODE (op2) != CONST_INT
1697 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1698 return false;
1699 op2 = GEN_INT (INTVAL (op2) - 1);
1700 return mep_expand_setcc_1 (GT, dest, op1, op2);
1701
1702 case GEU:
1703 if (GET_CODE (op2) != CONST_INT
1704 || op2 == const0_rtx)
1705 return false;
1706 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1707 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1708
1709 default:
1710 gcc_unreachable ();
1711 }
1712 }
1713
1714 bool
mep_expand_setcc(rtx * operands)1715 mep_expand_setcc (rtx *operands)
1716 {
1717 rtx dest = operands[0];
1718 enum rtx_code code = GET_CODE (operands[1]);
1719 rtx op0 = operands[2];
1720 rtx op1 = operands[3];
1721
1722 return mep_expand_setcc_1 (code, dest, op0, op1);
1723 }
1724
/* Expand the comparison OPERANDS[0] (with arguments OPERANDS[1] and
   OPERANDS[2]) for use in a conditional branch.  Conditions the
   hardware cannot branch on directly are computed into a temporary
   with mep_expand_setcc_1 and then compared against zero.  Returns
   the comparison rtx to branch on.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Branchable directly only against a 4-bit immediate.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      /* a >= b is !(a < b): compute the setcc and branch on EQ 0.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* beq/bne take a register or a 4-bit immediate.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* a <= b is a < b+1 (and a > b is a >= b+1) when b+1 does not
	 overflow; otherwise fall back to a setcc.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* a <u 1 is a == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Prefer the direct <=u setcc; otherwise use !(b <u a).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* a >u b is b <u a.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Prefer the direct >=u setcc; otherwise use !(a <u b).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1837
1838 const char *
mep_emit_cbranch(rtx * operands,int ne)1839 mep_emit_cbranch (rtx *operands, int ne)
1840 {
1841 if (GET_CODE (operands[1]) == REG)
1842 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1843 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1844 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1845 else
1846 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1847 }
1848
1849 void
mep_expand_call(rtx * operands,int returns_value)1850 mep_expand_call (rtx *operands, int returns_value)
1851 {
1852 rtx addr = operands[returns_value];
1853 rtx tp = mep_tp_rtx ();
1854 rtx gp = mep_gp_rtx ();
1855
1856 gcc_assert (GET_CODE (addr) == MEM);
1857
1858 addr = XEXP (addr, 0);
1859
1860 if (! mep_call_address_operand (addr, VOIDmode))
1861 addr = force_reg (SImode, addr);
1862
1863 if (! operands[returns_value+2])
1864 operands[returns_value+2] = const0_rtx;
1865
1866 if (returns_value)
1867 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1868 operands[3], tp, gp));
1869 else
1870 emit_call_insn (gen_call_internal (addr, operands[1],
1871 operands[2], tp, gp));
1872 }
1873
1874 /* Aliasing Support. */
1875
1876 /* If X is a machine specific address (i.e. a symbol or label being
1877 referenced as a displacement from the GOT implemented using an
1878 UNSPEC), then return the base term. Otherwise return X. */
1879
1880 rtx
mep_find_base_term(rtx x)1881 mep_find_base_term (rtx x)
1882 {
1883 rtx base, term;
1884 int unspec;
1885
1886 if (GET_CODE (x) != PLUS)
1887 return x;
1888 base = XEXP (x, 0);
1889 term = XEXP (x, 1);
1890
1891 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1892 && base == mep_tp_rtx ())
1893 unspec = UNS_TPREL;
1894 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1895 && base == mep_gp_rtx ())
1896 unspec = UNS_GPREL;
1897 else
1898 return x;
1899
1900 if (GET_CODE (term) != CONST)
1901 return x;
1902 term = XEXP (term, 0);
1903
1904 if (GET_CODE (term) != UNSPEC
1905 || XINT (term, 1) != unspec)
1906 return x;
1907
1908 return XVECEXP (term, 0, 0);
1909 }
1910
1911 /* Reload Support. */
1912
1913 /* Return true if the registers in CLASS cannot represent the change from
1914 modes FROM to TO. */
1915
1916 bool
mep_cannot_change_mode_class(machine_mode from,machine_mode to,enum reg_class regclass)1917 mep_cannot_change_mode_class (machine_mode from, machine_mode to,
1918 enum reg_class regclass)
1919 {
1920 if (from == to)
1921 return false;
1922
1923 /* 64-bit COP regs must remain 64-bit COP regs. */
1924 if (TARGET_64BIT_CR_REGS
1925 && (regclass == CR_REGS
1926 || regclass == LOADABLE_CR_REGS)
1927 && (GET_MODE_SIZE (to) < 8
1928 || GET_MODE_SIZE (from) < 8))
1929 return true;
1930
1931 return false;
1932 }
1933
1934 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1935
1936 static bool
mep_general_reg(rtx x)1937 mep_general_reg (rtx x)
1938 {
1939 while (GET_CODE (x) == SUBREG)
1940 x = XEXP (x, 0);
1941 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1942 }
1943
1944 static bool
mep_nongeneral_reg(rtx x)1945 mep_nongeneral_reg (rtx x)
1946 {
1947 while (GET_CODE (x) == SUBREG)
1948 x = XEXP (x, 0);
1949 return (GET_CODE (x) == REG
1950 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1951 }
1952
1953 static bool
mep_general_copro_reg(rtx x)1954 mep_general_copro_reg (rtx x)
1955 {
1956 while (GET_CODE (x) == SUBREG)
1957 x = XEXP (x, 0);
1958 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1959 }
1960
1961 static bool
mep_nonregister(rtx x)1962 mep_nonregister (rtx x)
1963 {
1964 while (GET_CODE (x) == SUBREG)
1965 x = XEXP (x, 0);
1966 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1967 }
1968
1969 #define DEBUG_RELOAD 0
1970
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class RCLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p.  */
  return GENERAL_REGS;
}
2009
2010 /* Copying X to register in RCLASS. */
2011
2012 enum reg_class
mep_secondary_input_reload_class(enum reg_class rclass,machine_mode mode ATTRIBUTE_UNUSED,rtx x)2013 mep_secondary_input_reload_class (enum reg_class rclass,
2014 machine_mode mode ATTRIBUTE_UNUSED,
2015 rtx x)
2016 {
2017 int rv = NO_REGS;
2018
2019 #if DEBUG_RELOAD
2020 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2021 debug_rtx (x);
2022 #endif
2023
2024 if (reg_class_subset_p (rclass, CR_REGS))
2025 rv = mep_secondary_copro_reload_class (rclass, x);
2026 else if (MEP_NONGENERAL_CLASS (rclass)
2027 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2028 rv = GENERAL_REGS;
2029
2030 #if DEBUG_RELOAD
2031 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2032 #endif
2033 return (enum reg_class) rv;
2034 }
2035
2036 /* Copying register in RCLASS to X. */
2037
2038 enum reg_class
mep_secondary_output_reload_class(enum reg_class rclass,machine_mode mode ATTRIBUTE_UNUSED,rtx x)2039 mep_secondary_output_reload_class (enum reg_class rclass,
2040 machine_mode mode ATTRIBUTE_UNUSED,
2041 rtx x)
2042 {
2043 int rv = NO_REGS;
2044
2045 #if DEBUG_RELOAD
2046 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2047 debug_rtx (x);
2048 #endif
2049
2050 if (reg_class_subset_p (rclass, CR_REGS))
2051 rv = mep_secondary_copro_reload_class (rclass, x);
2052 else if (MEP_NONGENERAL_CLASS (rclass)
2053 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2054 rv = GENERAL_REGS;
2055
2056 #if DEBUG_RELOAD
2057 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2058 #endif
2059
2060 return (enum reg_class) rv;
2061 }
2062
2063 /* Implement SECONDARY_MEMORY_NEEDED. */
2064
2065 bool
mep_secondary_memory_needed(enum reg_class rclass1,enum reg_class rclass2,machine_mode mode ATTRIBUTE_UNUSED)2066 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2067 machine_mode mode ATTRIBUTE_UNUSED)
2068 {
2069 if (!mep_have_core_copro_moves_p)
2070 {
2071 if (reg_classes_intersect_p (rclass1, CR_REGS)
2072 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2073 return true;
2074 if (reg_classes_intersect_p (rclass2, CR_REGS)
2075 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2076 return true;
2077 if (!mep_have_copro_copro_moves_p
2078 && reg_classes_intersect_p (rclass1, CR_REGS)
2079 && reg_classes_intersect_p (rclass2, CR_REGS))
2080 return true;
2081 }
2082 return false;
2083 }
2084
/* Expand a secondary reload OPERANDS[0] <- OPERANDS[1] using scratch
   register OPERANDS[2].  WHICH below combines two digits: the tens
   digit classifies the destination and the units digit the source
   (0 = general reg or mem, 1 = control reg, 2 = far symbol).  The
   case labels written with a leading zero (00, 01, 02) are octal
   constants, but octal and decimal agree below 8, so they match the
   intended decimal codes.  */

void
mep_expand_reload (rtx *operands, machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  switch (which)
    {
    case 00: /* Don't know why this gets here.  */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Compute the far address into the scratch, then store through it.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2135
2136 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2137 can be moved directly into registers 0 to 7, but not into the rest.
2138 If so, and if the required class includes registers 0 to 7, restrict
2139 it to those registers. */
2140
2141 enum reg_class
mep_preferred_reload_class(rtx x,enum reg_class rclass)2142 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2143 {
2144 switch (GET_CODE (x))
2145 {
2146 case CONST_INT:
2147 if (INTVAL (x) >= 0x10000
2148 && INTVAL (x) < 0x01000000
2149 && (INTVAL (x) & 0xffff) != 0
2150 && reg_class_subset_p (TPREL_REGS, rclass))
2151 rclass = TPREL_REGS;
2152 break;
2153
2154 case CONST:
2155 case SYMBOL_REF:
2156 case LABEL_REF:
2157 if (mep_section_tag (x) != 'f'
2158 && reg_class_subset_p (TPREL_REGS, rclass))
2159 rclass = TPREL_REGS;
2160 break;
2161
2162 default:
2163 break;
2164 }
2165 return rclass;
2166 }
2167
2168 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2169 moves, 4 for direct double-register moves, and 1000 for anything
2170 that requires a temporary register or temporary stack slot. */
2171
2172 int
mep_register_move_cost(machine_mode mode,enum reg_class from,enum reg_class to)2173 mep_register_move_cost (machine_mode mode, enum reg_class from, enum reg_class to)
2174 {
2175 if (mep_have_copro_copro_moves_p
2176 && reg_class_subset_p (from, CR_REGS)
2177 && reg_class_subset_p (to, CR_REGS))
2178 {
2179 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2180 return 4;
2181 return 2;
2182 }
2183 if (reg_class_subset_p (from, CR_REGS)
2184 && reg_class_subset_p (to, CR_REGS))
2185 {
2186 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2187 return 8;
2188 return 4;
2189 }
2190 if (reg_class_subset_p (from, CR_REGS)
2191 || reg_class_subset_p (to, CR_REGS))
2192 {
2193 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2194 return 4;
2195 return 2;
2196 }
2197 if (mep_secondary_memory_needed (from, to, mode))
2198 return 1000;
2199 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2200 return 1000;
2201
2202 if (GET_MODE_SIZE (mode) > 4)
2203 return 4;
2204
2205 return 2;
2206 }
2207
2208
/* Functions to save and restore machine-specific function data.  */

/* Implement init_machine_status: allocate a zero-initialized,
   garbage-collected machine_function record for the current
   function.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
2216
/* Implement TARGET_ALLOCATE_INITIAL_VALUE: return a stack slot (as a
   MEM below the argument pointer) in which the prologue saves hard
   register REG's entry value, or NULL_RTX to use the default
   mechanism.  */

static rtx
mep_allocate_initial_value (rtx reg)
{
  int rss;

  if (GET_CODE (reg) != REG)
    return NULL_RTX;

  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    return NULL_RTX;

  /* In interrupt functions, the "initial" values of $gp and $tp are
     provided by the prologue.  They are not necessarily the same as
     the values that the caller was using.  */
  if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
    if (mep_interrupt_p ())
      return NULL_RTX;

  /* Lazily assign a 4-byte save slot for this register.  */
  if (! cfun->machine->reg_save_slot[REGNO(reg)])
    {
      cfun->machine->reg_save_size += 4;
      cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
    }

  rss = cfun->machine->reg_save_slot[REGNO(reg)];
  return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
}
2244
2245 rtx
mep_return_addr_rtx(int count)2246 mep_return_addr_rtx (int count)
2247 {
2248 if (count != 0)
2249 return const0_rtx;
2250
2251 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2252 }
2253
static rtx
mep_tp_rtx (void)
{
  /* Pseudo holding the entry value of $tp (tiny-data base).  */
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2259
static rtx
mep_gp_rtx (void)
{
  /* Pseudo holding the entry value of $gp (small-data base).  */
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2265
2266 static bool
mep_interrupt_p(void)2267 mep_interrupt_p (void)
2268 {
2269 if (cfun->machine->interrupt_handler == 0)
2270 {
2271 int interrupt_handler
2272 = (lookup_attribute ("interrupt",
2273 DECL_ATTRIBUTES (current_function_decl))
2274 != NULL_TREE);
2275 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2276 }
2277 return cfun->machine->interrupt_handler == 2;
2278 }
2279
2280 static bool
mep_disinterrupt_p(void)2281 mep_disinterrupt_p (void)
2282 {
2283 if (cfun->machine->disable_interrupts == 0)
2284 {
2285 int disable_interrupts
2286 = (lookup_attribute ("disinterrupt",
2287 DECL_ATTRIBUTES (current_function_decl))
2288 != NULL_TREE);
2289 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2290 }
2291 return cfun->machine->disable_interrupts == 2;
2292 }
2293
2294
2295 /* Frame/Epilog/Prolog Related. */
2296
/* Return true if INSN (an insn or a pattern) modifies REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* Auto-increment addressing counts as a set.  */
      if (FIND_REG_INC_NOTE (insn, reg))
        return true;
      insn = PATTERN (insn);
    }

  /* A register-to-itself copy is a no-op; don't count it.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2316
2317
/* Cached per-register verdicts for mep_call_saves_register, stored in
   cfun->machine->reg_saved[].  UNKNOWN (0) means "not computed yet".  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2322
/* Return true if hard register REGNO is (or must be assumed to be)
   modified somewhere in the current function's insn stream.  */

static bool
mep_reg_set_in_function (int regno)
{
  rtx reg;
  rtx_insn *insn;

  /* In an interrupt handler, any register that is live at all must be
     treated as clobbered.  */
  if (mep_interrupt_p () && df_regs_ever_live_p(regno))
    return true;

  /* Profiling code implicitly clobbers $lp (see mep_function_profiler).  */
  if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
    return true;

  /* Scan the outermost insn sequence.  */
  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  if (!insn)
    return false;

  reg = gen_rtx_REG (SImode, regno);

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && mep_reg_set_p (reg, insn))
      return true;
  return false;
}
2349
2350 static bool
mep_asm_without_operands_p(void)2351 mep_asm_without_operands_p (void)
2352 {
2353 if (cfun->machine->asms_without_operands == 0)
2354 {
2355 rtx_insn *insn;
2356
2357 push_topmost_sequence ();
2358 insn = get_insns ();
2359 pop_topmost_sequence ();
2360
2361 cfun->machine->asms_without_operands = 1;
2362 while (insn)
2363 {
2364 if (INSN_P (insn)
2365 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2366 {
2367 cfun->machine->asms_without_operands = 2;
2368 break;
2369 }
2370 insn = NEXT_INSN (insn);
2371 }
2372
2373 }
2374 return cfun->machine->asms_without_operands == 2;
2375 }
2376
2377 /* Interrupt functions save/restore every call-preserved register, and
2378 any call-used register it uses (or all if it calls any function,
2379 since they may get clobbered there too). Here we check to see
2380 which call-used registers need saving. */
2381
/* Extra coprocessor control registers that IVC2 interrupt handlers
   must preserve.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))

/* Return true if interrupt-handler save/restore rules require saving
   hard register R.  The tests are ordered from cheapest/most-specific
   to most general; do not reorder them casually.  */

static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The temporary used to shuttle control registers to/from memory
     must itself be preserved.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A bare asm may clobber anything that isn't fixed.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* In a leaf handler, an unused call-clobbered register needs no save.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2420
/* Return true if register R must be saved by this function's prologue.
   The answer is computed once per register and cached in
   cfun->machine->reg_saved[]; after the frame layout is frozen
   (frame_locked), only the cache is consulted.  */

static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A save slot may already have been assigned, e.g. by
         mep_allocate_initial_value.  */
      if (cfun->machine->reg_save_slot[r])
	rv = MEP_SAVES_YES;
      /* Profiling code clobbers $lp; see mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      /* Call-saved registers (and $lp) that the function uses.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2446
2447 /* Return true if epilogue uses register REGNO. */
2448
2449 bool
mep_epilogue_uses(int regno)2450 mep_epilogue_uses (int regno)
2451 {
2452 /* Since $lp is a call-saved register, the generic code will normally
2453 mark it used in the epilogue if it needs to be saved and restored.
2454 However, when profiling is enabled, the profiling code will implicitly
2455 clobber $11. This case has to be handled specially both here and in
2456 mep_call_saves_register. */
2457 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2458 return true;
2459 /* Interrupt functions save/restore pretty much everything. */
2460 return (reload_completed && mep_interrupt_saved_reg (regno));
2461 }
2462
2463 static int
mep_reg_size(int regno)2464 mep_reg_size (int regno)
2465 {
2466 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2467 return 8;
2468 return 4;
2469 }
2470
2471 /* Worker function for TARGET_CAN_ELIMINATE. */
2472
2473 bool
mep_can_eliminate(const int from,const int to)2474 mep_can_eliminate (const int from, const int to)
2475 {
2476 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2477 ? ! frame_pointer_needed
2478 : true);
2479 }
2480
/* Implement INITIAL_ELIMINATION_OFFSET.  Also computes (as a side
   effect) the frame layout: reg_saved[] cache, regsave_filler and
   frame_filler padding needed to keep both the register-save area and
   the whole frame 8-byte aligned.  */

int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the layout is frozen, recompute the save decisions from
     scratch each time.  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area to a multiple of 8.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the overall frame to a multiple of 8.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2525
/* Mark X as frame-related (so dwarf2 CFI is emitted for it) and
   return it; a terse helper for prologue code.  */

static rtx_insn *
F (rtx_insn *x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2532
2533 /* Since the prologue/epilogue code is generated after optimization,
2534 we can't rely on gcc to split constants for us. So, this code
2535 captures all the ways to add a constant to a register in one logic
2536 chunk, including optimizing away insns we just don't need. This
2537 makes the prolog/epilog code easier to follow. */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx_insn *insn;
  int hi, lo;

  /* Nothing to do for a no-op copy of zero.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Constant fits in a 16-bit signed immediate add.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  /* Build the constant in the temp as (high 16 bits) | (low 16 bits).  */
  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      /* The three-insn sequence above isn't directly expressible as
	 CFI, so attach a note describing the net effect.  */
      RTX_FRAME_RELATED_P(insn) = 1;
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2596
2597 /* Move SRC to DEST. Mark the move as being potentially dead if
2598 MAYBE_DEAD_P. */
2599
static rtx_insn *
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx_insn *insn = emit_move_insn (dest, src);
  /* The REG_MAYBE_DEAD annotation below is disabled; the note no
     longer exists in current GCC, so MAYBE_DEAD_P is ignored.  */
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2610
2611 /* Used for interrupt functions, which can't assume that $tp and $gp
2612 contain the correct pointers. */
2613
static void
mep_reload_pointer (int regno, const char *symbol)
{
  rtx reg, sym;

  /* A leaf function that never touches the register can keep whatever
     value arrived in it.  */
  if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
    return;

  /* Load the symbol's address in two halves (%hi then %lo).  */
  reg = gen_rtx_REG (SImode, regno);
  sym = gen_rtx_SYMBOL_REF (SImode, symbol);
  emit_insn (gen_movsi_topsym_s (reg, sym));
  emit_insn (gen_movsi_botsym_s (reg, reg, sym));
}
2627
2628 /* Assign save slots for any register not already saved. DImode
2629 registers go at the end of the reg save area; the rest go at the
2630 beginning. This is for alignment purposes. Returns true if a frame
2631 is really needed. */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* Saving only $tp/$gp/$lp, when those registers are never
	   written, does not by itself force a stack frame.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	/* Slot may already be assigned (e.g. by
	   mep_allocate_initial_value).  */
	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte registers fill the area from the front...  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* ...while DImode registers are placed from the end, for
	       8-byte alignment.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From here on the layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2665
/* Emit the RTL for the function prologue: disable interrupts if
   requested, allocate the stack frame, save the registers selected by
   mep_call_saves_register, set up the frame pointer, and (for
   interrupt handlers) reload $gp/$tp.  */

void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  /* Remembered for mep_start_function's frame commentary.  */
  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* Allocate the register-save area; fold the local frame into the
     same $sp adjustment when the total stays small (presumably so the
     register saves remain within short $sp-relative displacements --
     TODO confirm the 128 limit against the store insn's range).  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  /* Save each selected register into its assigned slot.  */
  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp need an actual save only if written, or in an
	   interrupt handler.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly storable registers: one frame-related store.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* 64-bit control/coprocessor register: move it through a
	       pair of core temporaries and store the two halves in
	       memory (endian-dependent) order.  */
	    rtx_insn *insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Describe the whole save as a single DImode store for CFI.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* 32-bit register that cannot be stored directly: bounce
	       it through the core temporary.  */
	    rtx_insn *insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Allocate whatever part of the frame the first adjustment left.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  /* Interrupt handlers cannot trust the caller's $gp/$tp.  */
  if (mep_interrupt_p ())
    {
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2792
/* Emit a human-readable description of the frame layout as assembler
   comments at the top of the function (TARGET_ASM_FUNCTION_PROLOGUE
   worker).  Purely informational; emits no instructions.  */

static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Name register 8 "$fp" only when it really is the frame pointer.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, print only a one-line summary.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, " %d regs", reg_save_size);
      if (local)
	fprintf (file, " %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, " %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  /* Walk the sorted slots, reporting each saved register and any
     alignment gaps between them.  */
  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Mirror the prologue's decision to skip unwritten $tp/$gp/$lp.  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t# %3d bytes for alignment\n", skip);
      fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t# %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t# %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t# %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t# %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2890
2891
/* Set while expanding an eh_return epilogue, which must not restore
   $lp (the EH handler address goes there instead).  */
static int mep_prevent_lp_restore = 0;
/* Set while expanding a sibcall epilogue, which restores registers
   but emits no return instruction.  */
static int mep_sibcall_epilogue = 0;
2894
/* Emit the RTL for the function epilogue.  Behavior is modulated by
   mep_prevent_lp_restore (eh_return) and mep_sibcall_epilogue
   (sibcall: restore registers, emit no return).  */

void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed. */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Deallocate the local frame first so the register restores
	     below stay within short $sp-relative offsets.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* Mirror the prologue: $tp/$gp/$lp were only saved if written
	   or in an interrupt handler.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	/* eh_return: $lp gets the handler address, not the saved value.  */
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 carry the EH return data; leave them alone in a
	   normal epilogue.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly loadable register: one load.  */
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Control register: load through the core temporary.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  /* Deallocate whatever remains of the frame.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* Apply the EH stack adjustment passed via mep_emit_eh_epilogue.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  /* Sibcalls supply their own transfer of control.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3016
3017 void
mep_expand_eh_return(rtx * operands)3018 mep_expand_eh_return (rtx *operands)
3019 {
3020 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3021 {
3022 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3023 emit_move_insn (ra, operands[0]);
3024 operands[0] = ra;
3025 }
3026
3027 emit_insn (gen_eh_epilogue (operands[0]));
3028 }
3029
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  /* Expand an epilogue that leaves $lp alone (the handler address is
     already there) and adds the unwinder's stack adjustment, which the
     epilogue reads from hard register 0 via eh_stack_adjust.  */
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3038
void
mep_expand_sibcall_epilogue (void)
{
  /* Restore registers and deallocate the frame, but emit no return
     insn -- the sibling call provides the transfer of control.  */
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3046
3047 static bool
mep_function_ok_for_sibcall(tree decl,tree exp ATTRIBUTE_UNUSED)3048 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3049 {
3050 if (decl == NULL)
3051 return false;
3052
3053 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3054 return false;
3055
3056 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3057 if (mep_interrupt_p () || mep_disinterrupt_p ())
3058 return false;
3059
3060 return true;
3061 }
3062
rtx
mep_return_stackadj_rtx (void)
{
  /* Register $10 carries the EH stack adjustment -- presumably this
     backs EH_RETURN_STACKADJ_RTX; the epilogue deliberately skips
     restoring $10/$11 (see mep_expand_epilogue).  */
  return gen_rtx_REG (SImode, 10);
}
3068
rtx
mep_return_handler_rtx (void)
{
  /* The EH handler address is passed in $lp.  */
  return gen_rtx_REG (SImode, LP_REGNO);
}
3074
void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  /* Emit a call to __mep_mcount, preserving $0 and $lp on the stack
     around it ($lp is clobbered by the bsr; $0 is the scratch used to
     shuttle $lp through ldc/stc).  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3090
const char *
mep_emit_bb_trace_ret (void)
{
  /* Same save/call/restore dance as mep_function_profiler, but calling
     __bb_trace_ret for basic-block profiling; returns an empty
     template string for the emitting insn.  */
  fprintf (asm_out_file, "\t# end of block profiling\n");
  fprintf (asm_out_file, "\tadd\t$sp, -8\n");
  fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tldc\t$0, $lp\n");
  fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
  fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
  fprintf (asm_out_file, "\tstc\t$0, $lp\n");
  fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
  fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
  return "";
}
3106
3107 #undef SAVE
3108 #undef RESTORE
3109
3110 /* Operand Printing. */
3111
3112 void
mep_print_operand_address(FILE * stream,rtx address)3113 mep_print_operand_address (FILE *stream, rtx address)
3114 {
3115 if (GET_CODE (address) == MEM)
3116 address = XEXP (address, 0);
3117 else
3118 /* cf: gcc.dg/asm-4.c. */
3119 gcc_assert (GET_CODE (address) == REG);
3120
3121 mep_print_operand (stream, address, 0);
3122 }
3123
/* Table driving mep_print_operand.  For operand-letter CODE (0 for no
   letter) and the string produced by encode_pattern (), FORMAT says
   how to print the operand: digits index the patternr[] sub-rtx array,
   '\\' escapes the next character, everything else is printed
   literally.  Pattern letters appear to be: r = register, m = memory,
   i = const_int, s = symbol_ref, l = label_ref, '+' = plus,
   L = lo_sum, H = high, u2/u3 = %tpoff/%sdaoff relocations -- TODO
   confirm against encode_pattern, which is outside this file chunk.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3169
3170 static int
unique_bit_in(HOST_WIDE_INT i)3171 unique_bit_in (HOST_WIDE_INT i)
3172 {
3173 switch (i & 0xff)
3174 {
3175 case 0x01: case 0xfe: return 0;
3176 case 0x02: case 0xfd: return 1;
3177 case 0x04: case 0xfb: return 2;
3178 case 0x08: case 0xf7: return 3;
3179 case 0x10: case 0x7f: return 4;
3180 case 0x20: case 0xbf: return 5;
3181 case 0x40: case 0xdf: return 6;
3182 case 0x80: case 0xef: return 7;
3183 default:
3184 gcc_unreachable ();
3185 }
3186 }
3187
3188 static int
bit_size_for_clip(HOST_WIDE_INT i)3189 bit_size_for_clip (HOST_WIDE_INT i)
3190 {
3191 int rv;
3192
3193 for (rv = 0; rv < 31; rv ++)
3194 if (((HOST_WIDE_INT) 1 << rv) > i)
3195 return rv + 1;
3196 gcc_unreachable ();
3197 }
3198
3199 /* Print an operand to a assembler instruction. */
3200
void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-operation suffix (clr/set/not) implied by the
	 logical RTX code.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Table-driven case: classify X into the global pattern/patternr
     buffers, find the matching conversions[] row, and interpret its
     format string (digits index patternr[], '\\' escapes, anything
     else prints literally).  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      /* Escaped literal character.  */
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      /* Digit: print the corresponding sub-rtx.  */
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      /* Bit position of a bset/bclr-style mask.  */
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      /* Width for a clip instruction.  */
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      /* High 16 bits.  */
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      /* Low 16 bits.  */
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Hex for values with no low-byte bits set.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      /* Decimal, with a hex comment for large values.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Literal character.  Suppress '+' before a negative
		 constant so we print e.g. "-4" rather than "+-4".  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3355
3356 void
mep_final_prescan_insn(rtx_insn * insn,rtx * operands ATTRIBUTE_UNUSED,int noperands ATTRIBUTE_UNUSED)3357 mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
3358 int noperands ATTRIBUTE_UNUSED)
3359 {
3360 /* Despite the fact that MeP is perfectly capable of branching and
3361 doing something else in the same bundle, gcc does jump
3362 optimization *after* scheduling, so we cannot trust the bundling
3363 flags on jump instructions. */
3364 if (GET_MODE (insn) == BImode
3365 && get_attr_slots (insn) != SLOTS_CORE)
3366 fputc ('+', asm_out_file);
3367 }
3368
3369 /* Function args in registers. */
3370
3371 static void
mep_setup_incoming_varargs(cumulative_args_t cum,machine_mode mode ATTRIBUTE_UNUSED,tree type ATTRIBUTE_UNUSED,int * pretend_size,int second_time ATTRIBUTE_UNUSED)3372 mep_setup_incoming_varargs (cumulative_args_t cum,
3373 machine_mode mode ATTRIBUTE_UNUSED,
3374 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3375 int second_time ATTRIBUTE_UNUSED)
3376 {
3377 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
3378
3379 if (nsave > 0)
3380 cfun->machine->arg_regs_to_save = nsave;
3381 *pretend_size = nsave * 4;
3382 }
3383
3384 static int
bytesize(const_tree type,machine_mode mode)3385 bytesize (const_tree type, machine_mode mode)
3386 {
3387 if (mode == BLKmode)
3388 return int_size_in_bytes (type);
3389 return GET_MODE_SIZE (mode);
3390 }
3391
/* Expand __builtin_saveregs: spill the unsaved incoming argument
   registers into a stack buffer so va_arg can walk them.  Returns the
   address of that buffer.  */
static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  /* Number of argument registers recorded by
     mep_setup_incoming_varargs.  */
  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* Room for the core registers (rounded up to an 8-byte multiple)
	 plus 8 bytes per coprocessor register.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Core argument registers are $1..$4, so the first one still needing
     a save is $(5-ns).  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      /* Coprocessor saves start just past the (8-aligned) core area.  */
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* NOTE(review): 49 appears to be the first IVC2 coprocessor
	     argument register number — confirm against mep.h.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3429
3430 static tree
mep_build_builtin_va_list(void)3431 mep_build_builtin_va_list (void)
3432 {
3433 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3434 tree record;
3435
3436
3437 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3438
3439 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3440 get_identifier ("__va_next_gp"), ptr_type_node);
3441 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3442 get_identifier ("__va_next_gp_limit"),
3443 ptr_type_node);
3444 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3445 ptr_type_node);
3446 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3447 ptr_type_node);
3448
3449 DECL_FIELD_CONTEXT (f_next_gp) = record;
3450 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3451 DECL_FIELD_CONTEXT (f_next_cop) = record;
3452 DECL_FIELD_CONTEXT (f_next_stack) = record;
3453
3454 TYPE_FIELDS (record) = f_next_gp;
3455 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3456 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3457 DECL_CHAIN (f_next_cop) = f_next_stack;
3458
3459 layout_type (record);
3460
3461 return record;
3462 }
3463
/* Expand va_start: fill in the four fields of the va_list VALIST.
   next_gp points at the register-save buffer, next_gp_limit just past
   the saved core registers, next_cop at the 8-aligned coprocessor
   area, and next_stack at NEXTARG (the first stack argument).  */
static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Field decls, in the order mep_build_builtin_va_list chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Advance past the 8-aligned core save area, i.e.
     va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3513
/* Gimplify one va_arg reference: emit code that picks the value from
   the register-save area (core or, for IVC2 vectors, coprocessor)
   while any saved registers remain, otherwise from the stack.
   Returns a dereference of the chosen address.  */
static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  /* IVC2 vectors travel in coprocessor registers.  */
  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  /* Anything larger than a register slot (8 bytes for IVC2 vectors,
     4 otherwise) or of unknown size was passed by reference: what we
     fetch here is its address.  */
  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  if (by_reference)
    {
      type = build_pointer_type (type);
      size = 4;
    }
  /* Stack slots are word-aligned.  */
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node);

  /* No saved registers left: jump to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together, keeping core and coprocessor save
     areas in step.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference arguments, the saved value is itself a pointer;
     dereference once more to reach the object.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3621
3622 void
mep_init_cumulative_args(CUMULATIVE_ARGS * pcum,tree fntype,rtx libname ATTRIBUTE_UNUSED,tree fndecl ATTRIBUTE_UNUSED)3623 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3624 rtx libname ATTRIBUTE_UNUSED,
3625 tree fndecl ATTRIBUTE_UNUSED)
3626 {
3627 pcum->nregs = 0;
3628
3629 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3630 pcum->vliw = 1;
3631 else
3632 pcum->vliw = 0;
3633 }
3634
3635 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3636 larger than 4 bytes are passed indirectly. Return value in 0,
3637 unless bigger than 4 bytes, then the caller passes a pointer as the
3638 first arg. For varargs, we copy $1..$4 to the stack. */
3639
3640 static rtx
mep_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)3641 mep_function_arg (cumulative_args_t cum_v, machine_mode mode,
3642 const_tree type ATTRIBUTE_UNUSED,
3643 bool named ATTRIBUTE_UNUSED)
3644 {
3645 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3646
3647 /* VOIDmode is a signal for the backend to pass data to the call
3648 expander via the second operand to the call pattern. We use
3649 this to determine whether to use "jsr" or "jsrv". */
3650 if (mode == VOIDmode)
3651 return GEN_INT (cum->vliw);
3652
3653 /* If we havn't run out of argument registers, return the next. */
3654 if (cum->nregs < 4)
3655 {
3656 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3657 return gen_rtx_REG (mode, cum->nregs + 49);
3658 else
3659 return gen_rtx_REG (mode, cum->nregs + 1);
3660 }
3661
3662 /* Otherwise the argument goes on the stack. */
3663 return NULL_RTX;
3664 }
3665
3666 static bool
mep_pass_by_reference(cumulative_args_t cum ATTRIBUTE_UNUSED,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)3667 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
3668 machine_mode mode,
3669 const_tree type,
3670 bool named ATTRIBUTE_UNUSED)
3671 {
3672 int size = bytesize (type, mode);
3673
3674 /* This is non-obvious, but yes, large values passed after we've run
3675 out of registers are *still* passed by reference - we put the
3676 address of the parameter on the stack, as well as putting the
3677 parameter itself elsewhere on the stack. */
3678
3679 if (size <= 0 || size > 8)
3680 return true;
3681 if (size <= 4)
3682 return false;
3683 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3684 && type != NULL_TREE && VECTOR_TYPE_P (type))
3685 return false;
3686 return true;
3687 }
3688
3689 static void
mep_function_arg_advance(cumulative_args_t pcum,machine_mode mode ATTRIBUTE_UNUSED,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)3690 mep_function_arg_advance (cumulative_args_t pcum,
3691 machine_mode mode ATTRIBUTE_UNUSED,
3692 const_tree type ATTRIBUTE_UNUSED,
3693 bool named ATTRIBUTE_UNUSED)
3694 {
3695 get_cumulative_args (pcum)->nregs += 1;
3696 }
3697
3698 bool
mep_return_in_memory(const_tree type,const_tree decl ATTRIBUTE_UNUSED)3699 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3700 {
3701 int size = bytesize (type, BLKmode);
3702 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3703 return size > 0 && size <= 8 ? 0 : 1;
3704 return size > 0 && size <= 4 ? 0 : 1;
3705 }
3706
/* Hook: always narrow volatile bitfield accesses on MeP.  (Presumably
   implements TARGET_NARROW_VOLATILE_BITFIELD — confirm at the hook
   registration site.)  The original body had an unreachable
   "return false;" after "return true;"; the dead statement is
   removed.  */
static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3713
3714 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3715
3716 rtx
mep_function_value(const_tree type,const_tree func ATTRIBUTE_UNUSED)3717 mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
3718 {
3719 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3720 return gen_rtx_REG (TYPE_MODE (type), 48);
3721 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3722 }
3723
3724 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3725
rtx
mep_libcall_value (machine_mode mode)
{
  /* All libcall values come back in $0; no type is available here, so
     the IVC2 vector special case cannot apply.  */
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3731
3732 /* Handle pipeline hazards. */
3733
/* Opcode classes that participate in the stc->fsft / stc->ret
   pipeline hazards tracked by mep_asm_output_opcode.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
/* Printable names, indexed by op_num, for the hazard diagnostic.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class (an op_num value) of the previously emitted opcode.  */
static int prev_opcode = 0;
3738
3739 /* This isn't as optimal as it could be, because we don't know what
3740 control register the STC opcode is storing in. We only need to add
3741 the nop if it's the relevant register, but we add it for irrelevant
3742 registers also. */
3743
3744 void
mep_asm_output_opcode(FILE * file,const char * ptr)3745 mep_asm_output_opcode (FILE *file, const char *ptr)
3746 {
3747 int this_opcode = op_none;
3748 const char *hazard = 0;
3749
3750 switch (*ptr)
3751 {
3752 case 'f':
3753 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3754 this_opcode = op_fsft;
3755 break;
3756 case 'r':
3757 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3758 this_opcode = op_ret;
3759 break;
3760 case 's':
3761 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3762 this_opcode = op_stc;
3763 break;
3764 }
3765
3766 if (prev_opcode == op_stc && this_opcode == op_fsft)
3767 hazard = "nop";
3768 if (prev_opcode == op_stc && this_opcode == op_ret)
3769 hazard = "nop";
3770
3771 if (hazard)
3772 fprintf(file, "%s\t# %s-%s hazard\n\t",
3773 hazard, opnames[prev_opcode], opnames[this_opcode]);
3774
3775 prev_opcode = this_opcode;
3776 }
3777
3778 /* Handle attributes. */
3779
3780 static tree
mep_validate_based_tiny(tree * node,tree name,tree args,int flags ATTRIBUTE_UNUSED,bool * no_add)3781 mep_validate_based_tiny (tree *node, tree name, tree args,
3782 int flags ATTRIBUTE_UNUSED, bool *no_add)
3783 {
3784 if (TREE_CODE (*node) != VAR_DECL
3785 && TREE_CODE (*node) != POINTER_TYPE
3786 && TREE_CODE (*node) != TYPE_DECL)
3787 {
3788 warning (0, "%qE attribute only applies to variables", name);
3789 *no_add = true;
3790 }
3791 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3792 {
3793 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3794 {
3795 warning (0, "address region attributes not allowed with auto storage class");
3796 *no_add = true;
3797 }
3798 /* Ignore storage attribute of pointed to variable: char __far * x; */
3799 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3800 {
3801 warning (0, "address region attributes on pointed-to types ignored");
3802 *no_add = true;
3803 }
3804 }
3805
3806 return NULL_TREE;
3807 }
3808
3809 static int
mep_multiple_address_regions(tree list,bool check_section_attr)3810 mep_multiple_address_regions (tree list, bool check_section_attr)
3811 {
3812 tree a;
3813 int count_sections = 0;
3814 int section_attr_count = 0;
3815
3816 for (a = list; a; a = TREE_CHAIN (a))
3817 {
3818 if (is_attribute_p ("based", TREE_PURPOSE (a))
3819 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3820 || is_attribute_p ("near", TREE_PURPOSE (a))
3821 || is_attribute_p ("far", TREE_PURPOSE (a))
3822 || is_attribute_p ("io", TREE_PURPOSE (a)))
3823 count_sections ++;
3824 if (check_section_attr)
3825 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3826 }
3827
3828 if (check_section_attr)
3829 return section_attr_count;
3830 else
3831 return count_sections;
3832 }
3833
/* Attribute list relevant to DECL: for a type, its type attributes;
   for a decl, its own attributes if any, otherwise those of its
   type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
		: DECL_ATTRIBUTES (decl) \
		  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3839
/* Handler for the "near" and "far" attributes: valid on variables and
   functions; reject auto variables, pointed-to types, and duplicate
   address-region attributes.  */
static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x; */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* A region attribute is already present: warn and drop the old
	 ones so only the new attribute survives.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3876
3877 static tree
mep_validate_disinterrupt(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add)3878 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3879 int flags ATTRIBUTE_UNUSED, bool *no_add)
3880 {
3881 if (TREE_CODE (*node) != FUNCTION_DECL
3882 && TREE_CODE (*node) != METHOD_TYPE)
3883 {
3884 warning (0, "%qE attribute only applies to functions", name);
3885 *no_add = true;
3886 }
3887 return NULL_TREE;
3888 }
3889
/* Handler for the "interrupt" attribute: NODE must be a function decl
   that takes no arguments and returns void; interrupt functions are
   also made uninlinable.  */
static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* Interrupt functions may never be inlined.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototype must be either (void) or absent entirely.  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
3919
/* Handler for the "io" and "cb" attributes: valid only on variables,
   with an optional integer-constant address argument.  Accepted
   variables are also marked volatile unless TARGET_IO_NO_VOLATILE.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so a parenthesized constant
	 still qualifies.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
3947
3948 static tree
mep_validate_vliw(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add)3949 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3950 int flags ATTRIBUTE_UNUSED, bool *no_add)
3951 {
3952 if (TREE_CODE (*node) != FUNCTION_TYPE
3953 && TREE_CODE (*node) != FUNCTION_DECL
3954 && TREE_CODE (*node) != METHOD_TYPE
3955 && TREE_CODE (*node) != FIELD_DECL
3956 && TREE_CODE (*node) != TYPE_DECL)
3957 {
3958 static int gave_pointer_note = 0;
3959 static int gave_array_note = 0;
3960 static const char * given_type = NULL;
3961
3962 given_type = get_tree_code_name (TREE_CODE (*node));
3963 if (TREE_CODE (*node) == POINTER_TYPE)
3964 given_type = "pointers";
3965 if (TREE_CODE (*node) == ARRAY_TYPE)
3966 given_type = "arrays";
3967
3968 if (given_type)
3969 warning (0, "%qE attribute only applies to functions, not %s",
3970 name, given_type);
3971 else
3972 warning (0, "%qE attribute only applies to functions",
3973 name);
3974 *no_add = true;
3975
3976 if (TREE_CODE (*node) == POINTER_TYPE
3977 && !gave_pointer_note)
3978 {
3979 inform (input_location,
3980 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
3981 " typedef int (__vliw *vfuncptr) ();");
3982 gave_pointer_note = 1;
3983 }
3984
3985 if (TREE_CODE (*node) == ARRAY_TYPE
3986 && !gave_array_note)
3987 {
3988 inform (input_location,
3989 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
3990 " typedef int (__vliw *vfuncptr[]) ();");
3991 gave_array_note = 1;
3992 }
3993 }
3994 if (!TARGET_VLIW)
3995 error ("VLIW functions are not allowed without a VLIW configuration");
3996 return NULL_TREE;
3997 }
3998
/* Machine attribute table; the final all-NULL entry terminates it.
   Each handler validates the attribute's placement and may set
   *no_add to reject it.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,           0, 0, false, false, false, NULL, false }
};
4015
4016 static bool
mep_function_attribute_inlinable_p(const_tree callee)4017 mep_function_attribute_inlinable_p (const_tree callee)
4018 {
4019 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4020 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4021 return (lookup_attribute ("disinterrupt", attrs) == 0
4022 && lookup_attribute ("interrupt", attrs) == 0);
4023 }
4024
4025 static bool
mep_can_inline_p(tree caller,tree callee)4026 mep_can_inline_p (tree caller, tree callee)
4027 {
4028 if (TREE_CODE (callee) == ADDR_EXPR)
4029 callee = TREE_OPERAND (callee, 0);
4030
4031 if (!mep_vliw_function_p (caller)
4032 && mep_vliw_function_p (callee))
4033 {
4034 return false;
4035 }
4036 return true;
4037 }
4038
/* Flag bits recorded per function name by the #pragma call and
   #pragma disinterrupt handlers.  */
#define FUNC_CALL 1
#define FUNC_DISINTERRUPT 2


/* Per-function-name record: FLAG holds the FUNC_* bits that pragmas
   set; USED holds the bits that lookups actually consulted, so unused
   pragmas can be reported at end of file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
};

/* Map from function name to its pragma_entry.  */
static GTY(()) hash_map<nofree_string_hash, pragma_entry> *pragma_htab;
4050
/* Record that a pragma set FLAG (a FUNC_* bit) for FUNCNAME, creating
   the hash table and the entry on first use.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  if (!pragma_htab)
    pragma_htab = hash_map<nofree_string_hash, pragma_entry>::create_ggc (31);

  bool existed;
  /* Copy the name into GC-managed storage; the caller's string may
     not outlive us.  */
  const char *name = ggc_strdup (funcname);
  pragma_entry *slot = &pragma_htab->get_or_insert (name, &existed);
  if (!existed)
    {
      slot->flag = 0;
      slot->used = 0;
    }
  slot->flag |= flag;
}
4067
/* Return true if some pragma set FLAG for FUNCNAME, marking the entry
   as used so unused pragmas can be diagnosed later.  */
static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  if (!pragma_htab)
    return false;

  /* Skip a three-character "@c." style prefix — NOTE(review):
     presumably a name-encoding marker added elsewhere in this
     backend; confirm against the encoding code.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  pragma_entry *slot = pragma_htab->get (funcname);
  if (slot && (slot->flag & flag))
    {
      slot->used |= flag;
      return true;
    }
  return false;
}
4085
/* Return true if "#pragma call" named FUNCNAME.  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record that "#pragma call" named FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* Return true if "#pragma disinterrupt" named FUNCNAME.  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record that "#pragma disinterrupt" named FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4109
4110 bool
note_unused_pragma_disinterrupt(const char * const & s,const pragma_entry & e,void *)4111 note_unused_pragma_disinterrupt (const char *const &s, const pragma_entry &e,
4112 void *)
4113 {
4114 if ((e.flag & FUNC_DISINTERRUPT)
4115 && !(e.used & FUNC_DISINTERRUPT))
4116 warning (0, "\"#pragma disinterrupt %s\" not used", s);
4117 return 1;
4118 }
4119
/* End-of-file cleanup: report every "#pragma disinterrupt" that never
   matched a function.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    pragma_htab->traverse<void *, note_unused_pragma_disinterrupt> (NULL);
}
4126
4127 /* These three functions provide a bridge between the pramgas that
4128 affect register classes, and the functions that maintain them. We
4129 can't call those functions directly as pragma handling is part of
4130 the front end and doesn't have direct access to them. */
4131
/* Snapshot the current register tables (pragma bridge; see comment
   above).  */
void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Recompute register tables after a pragma altered them.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* (Re)initialize the register tables.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4149
4150
4151
/* Scan attribute list LIST for an address-region attribute and return
   its one-character encoding: 'b' based, 't' tiny, 'n' near, 'f' far,
   'i' io with an in-range constant address, 'I' io otherwise, 'c' cb,
   or 0 when none applies.  DECL is used only for the duplicate-region
   diagnostic and the TARGET_TF default.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Keep only the first region attribute.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  /* "io" with a constant address in [0, 0x1000000] encodes as
	     'i'; any other io form encodes as 'I'.  */
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* With TARGET_TF, section-less functions default to far.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4197
4198 static int
mep_comp_type_attributes(const_tree t1,const_tree t2)4199 mep_comp_type_attributes (const_tree t1, const_tree t2)
4200 {
4201 int vliw1, vliw2;
4202
4203 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4204 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4205
4206 if (vliw1 != vliw2)
4207 return 0;
4208
4209 return 1;
4210 }
4211
/* Implement TARGET_INSERT_ATTRIBUTES.  Add implicit attributes to DECL:
   a "disinterrupt" attribute for functions named in a matching
   #pragma disinterrupt, and a default section attribute ("based",
   "tiny" or "far") for global variables that carry no explicit
   section encoding, chosen from the size cutoffs or, under
   -mrand-tpgp, from a hash of the name.  */

static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  /* Functions only receive the pragma-driven "disinterrupt"
     attribute; section defaults below apply to variables.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only variables with static storage duration are placed.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x; */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Determine any existing section encoding: first from the attributes
     being added (or already on the decl), then from the type.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      /* For __io variables with an explicit address, diagnose two
	 separately-written declarations that claim the same address.  */
      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  /* -mrand-tpgp: scatter small variables across the based/tiny/far
     sections using a simple hash of the name (sum of bytes mod 4;
     the fourth bucket gets no section attribute).  */
  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Normal case: pick the section from the size cutoffs.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section=... overrides the size-based choice for
     read-only variables.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4362
/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for DECL,
   translate its MeP section attributes into a one-character encoding
   and rename the symbol to "@<char>.<name>" so later passes can
   recover the section class from the assembler name alone.  Also
   warn when a variable is larger than its chosen section's reachable
   range.  */

static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* RTL holds either a bare SYMBOL_REF or a MEM wrapping one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@" + encoding char + "." + old name + NUL = strlen + 4.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limits for the sections that have one; other encodings
	 (maxsize == 0) are unchecked.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4438
/* Implement TARGET_STRIP_NAME_ENCODING.  Strip any leading '*'
   markers and any "@<tag>." section-encoding prefixes (added by
   mep_encode_section_info) from SYM, returning a pointer to the
   first character of the bare name.  */

const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      /* Test sym[1] before sym[2] so that a degenerate name like
	 "@" cannot cause a read one byte past the terminator.  */
      else if (*sym == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4452
/* Implement TARGET_ASM_SELECT_SECTION.  Pick the output section for
   DECL from the "@<tag>." prefix that mep_encode_section_info put on
   its assembler name, falling back to the generic
   text/rodata/data choice.  */

static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether DECL may live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  /* Functions: choose between core text, far text ('f' encoding) and
     their VLIW counterparts.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      /* With -ffunction-sections or COMDAT, assign a unique section
	 name instead and fall through to the default return below.  */
      if (flag_function_sections || DECL_COMDAT_GROUP (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  /* Variables: dispatch on the encoding character.  */
  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* __io/__cb variables live at fixed addresses; an
	       initialized instance cannot be placed there.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4543
/* Implement TARGET_ASM_UNIQUE_SECTION.  Give DECL its own section,
   named "<prefix><stripped-name>".  The prefix encodes both the
   decl's section class (derived from its kind, the "vliw" attribute
   and the "@<tag>." name encoding) and whether the decl is COMDAT,
   in which case the ".gnu.linkonce.*" form is used.  */

static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by [section class][is-COMDAT].  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  /* Prefer the encoded assembler name once RTL has been set up.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* Base section class: text (core or VLIW), rodata, or data.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the class from the "@<tag>." name encoding, then strip
     the encoding before building the section name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_COMDAT_GROUP(decl) != NULL];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  set_decl_section_name (decl, string);
}
4615
4616 /* Given a decl, a section name, and whether the decl initializer
4617 has relocs, choose attributes for the section. */
4618
4619 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4620
4621 static unsigned int
mep_section_type_flags(tree decl,const char * name,int reloc)4622 mep_section_type_flags (tree decl, const char *name, int reloc)
4623 {
4624 unsigned int flags = default_section_type_flags (decl, name, reloc);
4625
4626 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4627 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4628 flags |= SECTION_MEP_VLIW;
4629
4630 return flags;
4631 }
4632
4633 /* Switch to an arbitrary section NAME with attributes as specified
4634 by FLAGS. ALIGN specifies any known alignment requirements for
4635 the section; 0 if the default should be used.
4636
4637 Differs from the standard ELF version only in support of VLIW mode. */
4638
4639 static void
mep_asm_named_section(const char * name,unsigned int flags,tree decl ATTRIBUTE_UNUSED)4640 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4641 {
4642 char flagchars[8], *f = flagchars;
4643 const char *type;
4644
4645 if (!(flags & SECTION_DEBUG))
4646 *f++ = 'a';
4647 if (flags & SECTION_WRITE)
4648 *f++ = 'w';
4649 if (flags & SECTION_CODE)
4650 *f++ = 'x';
4651 if (flags & SECTION_SMALL)
4652 *f++ = 's';
4653 if (flags & SECTION_MEP_VLIW)
4654 *f++ = 'v';
4655 *f = '\0';
4656
4657 if (flags & SECTION_BSS)
4658 type = "nobits";
4659 else
4660 type = "progbits";
4661
4662 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4663 name, flagchars, type);
4664
4665 if (flags & SECTION_CODE)
4666 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4667 asm_out_file);
4668 }
4669
/* Output an uninitialized variable NAME (SIZE bytes, ALIGN bits of
   alignment) for DECL to STREAM; GLOBAL is nonzero for a global
   symbol.  The name encoding selects the treatment:
     @i./@I./@c. -- __io and __cb variables sit at fixed addresses,
		    so emit an absolute symbol assignment instead of
		    reserving storage;
     @b./@t./@f. -- emit zero-filled storage into the based/tiny/far
		    bss section;
     otherwise   -- use the ordinary .local/.comm directives.  */

void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* Without an explicit address there is nothing to emit.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      /* Define the symbol as an absolute address.  */
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert ALIGN (bits) to a power-of-two byte alignment.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: ordinary common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4747
4748 /* Trampolines. */
4749
/* Implement TARGET_TRAMPOLINE_INIT.  Rather than emitting inline code
   to fill in the trampoline at M_TRAMP, call the runtime helper
   __mep_trampoline_helper with the trampoline address, FNDECL's
   address and the STATIC_CHAIN value; the helper writes the code.  */

static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4762
4763 /* Experimental Reorg. */
4764
/* Return true if IN mentions register REG, or mentions any MEM when
   REG is NULL.  If MODES_TOO is nonzero, a register reference counts
   only when its mode matches REG's mode as well as its number.  Used
   by the regmove reorg pass to detect conflicting references.  */

static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also. */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference only "mentions" something when searching for
     MEMs.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* Searching for a register: look inside the address.
	 Searching for memory references: any MEM matches.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Leaf codes that can contain neither REG nor MEM.  */
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  When scanning for MEMs, only
     the destination of a SET is examined.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  fmt = GET_RTX_FORMAT (code);

  /* Generic case: recurse over every sub-rtx and rtx vector.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4830
4831 #define EXPERIMENTAL_REGMOVE_REORG 1
4832
4833 #if EXPERIMENTAL_REGMOVE_REORG
4834
4835 static int
mep_compatible_reg_class(int r1,int r2)4836 mep_compatible_reg_class (int r1, int r2)
4837 {
4838 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4839 return 1;
4840 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4841 return 1;
4842 return 0;
4843 }
4844
4845 static void
mep_reorg_regmove(rtx_insn * insns)4846 mep_reorg_regmove (rtx_insn *insns)
4847 {
4848 rtx_insn *insn, *next, *follow;
4849 rtx pat, *where;
4850 int count = 0, done = 0, replace, before = 0;
4851
4852 if (dump_file)
4853 for (insn = insns; insn; insn = NEXT_INSN (insn))
4854 if (NONJUMP_INSN_P (insn))
4855 before++;
4856
4857 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4858 set that uses the r2 and r2 dies there. We replace r2 with r1
4859 and see if it's still a valid insn. If so, delete the first set.
4860 Copied from reorg.c. */
4861
4862 while (!done)
4863 {
4864 done = 1;
4865 for (insn = insns; insn; insn = next)
4866 {
4867 next = next_nonnote_nondebug_insn (insn);
4868 if (! NONJUMP_INSN_P (insn))
4869 continue;
4870 pat = PATTERN (insn);
4871
4872 replace = 0;
4873
4874 if (GET_CODE (pat) == SET
4875 && GET_CODE (SET_SRC (pat)) == REG
4876 && GET_CODE (SET_DEST (pat)) == REG
4877 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4878 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4879 {
4880 follow = next_nonnote_nondebug_insn (insn);
4881 if (dump_file)
4882 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4883
4884 while (follow && NONJUMP_INSN_P (follow)
4885 && GET_CODE (PATTERN (follow)) == SET
4886 && !dead_or_set_p (follow, SET_SRC (pat))
4887 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4888 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4889 {
4890 if (dump_file)
4891 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4892 follow = next_nonnote_insn (follow);
4893 }
4894
4895 if (dump_file)
4896 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4897 if (follow && NONJUMP_INSN_P (follow)
4898 && GET_CODE (PATTERN (follow)) == SET
4899 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4900 {
4901 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4902 {
4903 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4904 {
4905 replace = 1;
4906 where = & SET_SRC (PATTERN (follow));
4907 }
4908 }
4909 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4910 {
4911 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4912 {
4913 replace = 1;
4914 where = & PATTERN (follow);
4915 }
4916 }
4917 }
4918 }
4919
4920 /* If so, follow is the corresponding insn */
4921 if (replace)
4922 {
4923 if (dump_file)
4924 {
4925 rtx_insn *x;
4926
4927 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4928 for (x = insn; x ;x = NEXT_INSN (x))
4929 {
4930 print_rtl_single (dump_file, x);
4931 if (x == follow)
4932 break;
4933 fprintf (dump_file, "\n");
4934 }
4935 }
4936
4937 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4938 follow, where))
4939 {
4940 count ++;
4941 delete_insn (insn);
4942 if (dump_file)
4943 {
4944 fprintf (dump_file, "\n----- Success! new insn:\n\n");
4945 print_rtl_single (dump_file, follow);
4946 }
4947 done = 0;
4948 }
4949 }
4950 }
4951 }
4952
4953 if (dump_file)
4954 {
4955 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4956 fprintf (dump_file, "=====\n");
4957 }
4958 }
4959 #endif
4960
4961
4962 /* Figure out where to put LABEL, which is the label for a repeat loop.
4963 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
4964 the loop ends just before LAST_INSN. If SHARED, insns other than the
4965 "repeat" might use LABEL to jump to the loop's continuation point.
4966
4967 Return the last instruction in the adjusted loop. */
4968
static rtx_insn *
mep_insert_repeat_label_last (rtx_insn *last_insn, rtx_code_label *label,
			      bool including, bool shared)
{
  rtx_insn *next, *prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Walk backwards, trying to pull up to two suitable insns above
       the label so they become the two-opcode repeat epilogue.  Stop
       at anything that must not occupy those slots.  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a bundled SEQUENCE, inspect its second element.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = as_a <rtx_insn *> (XVECEXP (PATTERN (prev), 0, 1));

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops: pad the epilogue out to exactly two insns.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5059
5060
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn for OPERANDS.  Each loop is given a numeric tag so
   that mep_reorg_repeat can later match begin and end insns; a fresh
   tag is allocated when there is none yet or when the previous doloop
   insn emitted was of the same kind (meaning a new loop started).  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[1], tag));
  else
    /* NOTE(review): operands[0] is passed for both operands here;
       presumably the doloop_begin_internal pattern expects the
       counter twice -- confirm against the .md pattern.  */
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5079
5080
5081 /* Code for converting doloop_begins and doloop_ends into valid
5082 MeP instructions. A doloop_begin is just a placeholder:
5083
5084 $count = unspec ($count)
5085
5086 where $count is initially the number of iterations - 1.
5087 doloop_end has the form:
5088
5089 if ($count-- == 0) goto label
5090
5091 The counter variable is private to the doloop insns, nothing else
5092 relies on its value.
5093
5094 There are three cases, in decreasing order of preference:
5095
5096 1. A loop has exactly one doloop_begin and one doloop_end.
5097 The doloop_end branches to the first instruction after
5098 the doloop_begin.
5099
5100 In this case we can replace the doloop_begin with a repeat
5101 instruction and remove the doloop_end. I.e.:
5102
5103 $count1 = unspec ($count1)
5104 label:
5105 ...
5106 insn1
5107 insn2
5108 if ($count2-- == 0) goto label
5109
5110 becomes:
5111
5112 repeat $count1,repeat_label
5113 label:
5114 ...
5115 repeat_label:
5116 insn1
5117 insn2
5118 # end repeat
5119
5120 2. As for (1), except there are several doloop_ends. One of them
5121 (call it X) falls through to a label L. All the others fall
5122 through to branches to L.
5123
5124 In this case, we remove X and replace the other doloop_ends
5125 with branches to the repeat label. For example:
5126
5127 $count1 = unspec ($count1)
5128 start:
5129 ...
5130 if ($count2-- == 0) goto label
5131 end:
5132 ...
5133 if ($count3-- == 0) goto label
5134 goto end
5135
5136 becomes:
5137
5138 repeat $count1,repeat_label
5139 start:
5140 ...
5141 repeat_label:
5142 nop
5143 nop
5144 # end repeat
5145 end:
5146 ...
5147 goto repeat_label
5148
5149 3. The fallback case. Replace doloop_begins with:
5150
5151 $count = $count + 1
5152
5153 Replace doloop_ends with the equivalent of:
5154
5155 $count = $count - 1
5156 if ($count == 0) goto label
5157
5158 Note that this might need a scratch register if $count
5159 is stored in memory. */
5160
/* A structure describing one doloop_begin.  These are chained
   together per loop tag and allocated on the stack by
   mep_reorg_repeat.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx_insn *insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx_insn *fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};


/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5208
5209
5210 /* Return true if LOOP can be converted into repeat/repeat_end form
5211 (that is, if it matches cases (1) or (2) above). */
5212
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above): a single
   doloop_begin whose ends all branch back to the same place.  */

static bool
mep_repeat_loop_p (struct mep_doloop *loop)
{
  struct mep_doloop_end *end;
  rtx fallthrough;

  /* There must be exactly one doloop_begin and at least one doloop_end.  */
  if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
    return false;

  /* The first doloop_end (X) must branch back to the insn after
     the doloop_begin.  */
  if (prev_real_insn (loop->end->label) != loop->begin->insn)
    return false;

  /* All the other doloop_ends must branch to the same place as X.
     When the branch isn't taken, they must jump to the instruction
     after X.  */
  fallthrough = loop->end->fallthrough;
  for (end = loop->end->next; end != 0; end = end->next)
    if (end->label != loop->end->label
	|| !simplejump_p (end->fallthrough)
	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
      return false;

  return true;
}
5240
5241
5242 /* The main repeat reorg function. See comment above for details. */
5243
/* The main repeat reorg function.  See comment above for details.
   Scans INSNS for doloop_begin/doloop_end placeholders, groups them
   by loop tag, and converts each loop either to a hardware
   repeat/repeat_end (cases 1 and 2) or to an explicit
   decrement-and-branch sequence (case 3).  */

static void
mep_reorg_repeat (rtx_insn *insns)
{
  rtx_insn *insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  /* Operand 2 holds the loop tag assigned by mep_emit_doloop.  */
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx_code_label *repeat_label;
	rtx label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5390
5391
/* Return true if INSN is a conditional branch whose comparison code
   (EQ, NE, LT or GE) can be inverted while still matching some insn
   pattern.  The inversion is applied in place, tested with
   recog_memoized, and then undone, so INSN's pattern is unchanged on
   return (its cached insn code is invalidated).  */

static bool
mep_invertable_branch_p (rtx_insn *insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Temporarily replace the comparison with its inverse.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition with the inverted condition, then restore
     the original code and invalidate the cached insn code again.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5429
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   just after AFTER.  The original target label is deleted if INSN was
   its only user.  Aborts if the inverted insn fails to re-recognize.  */
static void
mep_invert_branch (rtx_insn *insn, rtx_insn *after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  /* Flip the comparison code in place; only the four codes handled by
     mep_invertable_branch_p can appear here.  */
  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds a LABEL_REF at
     the new label, keeping the use counts and JUMP_LABEL in sync.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	/* If we were the last user of the old label, delete it.  */
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Force re-recognition; the inverted form must be valid.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5475
/* Scan INSNS for backward conditional (or simple) branches that close
   a loop suitable for the hardware "erepeat" instruction: the body
   must contain no calls or barriers, and any label inside it must
   have exactly one resolvable user.  Qualifying loops are rewritten
   to use erepeat/erepeat_end, inverting the closing branch so the
   loop exit falls through.  */
static void
mep_reorg_erepeat (rtx_insn *insns)
{
  rtx_insn *insn, *prev;
  rtx_code_label *l;
  rtx x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself counts as one body insn.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch looking for its target label,
	   bailing out on anything that disqualifies the loop.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (CALL_P (prev) || BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx_insn *newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx_insn *barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && NOTE_P (barrier);
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && ! BARRIER_P (barrier))
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop: the branch itself is replaced
		       by the erepeat_end.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional loop: invert the branch so that it
		       exits the loop, and close with erepeat_end.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx_insn *user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || NOTE_P (user));
			 user = PREV_INSN (user))
		      if (JUMP_P (user) && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5587
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx_insn *insns)
{
  rtx_insn *insn, *label, *ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
	while (ret
	       && (NOTE_P (ret)
		   || LABEL_P (ret)
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return.  Drop our
		   use of the target label first, deleting the label
		   if no other users remain.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5625
5626
5627 static void
mep_reorg_addcombine(rtx_insn * insns)5628 mep_reorg_addcombine (rtx_insn *insns)
5629 {
5630 rtx_insn *i, *n;
5631
5632 for (i = insns; i; i = NEXT_INSN (i))
5633 if (INSN_P (i)
5634 && INSN_CODE (i) == CODE_FOR_addsi3
5635 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5636 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5637 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5638 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5639 {
5640 n = NEXT_INSN (i);
5641 if (INSN_P (n)
5642 && INSN_CODE (n) == CODE_FOR_addsi3
5643 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5644 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5645 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5646 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5647 {
5648 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5649 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5650 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5651 && ic + nc < 32767
5652 && ic + nc > -32768)
5653 {
5654 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5655 SET_NEXT_INSN (i) = NEXT_INSN (n);
5656 if (NEXT_INSN (i))
5657 SET_PREV_INSN (NEXT_INSN (i)) = i;
5658 }
5659 }
5660 }
5661 }
5662
5663 /* If this insn adjusts the stack, return the adjustment, else return
5664 zero. */
5665 static int
add_sp_insn_p(rtx_insn * insn)5666 add_sp_insn_p (rtx_insn *insn)
5667 {
5668 rtx pat;
5669
5670 if (! single_set (insn))
5671 return 0;
5672 pat = PATTERN (insn);
5673 if (GET_CODE (SET_DEST (pat)) != REG)
5674 return 0;
5675 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5676 return 0;
5677 if (GET_CODE (SET_SRC (pat)) != PLUS)
5678 return 0;
5679 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5680 return 0;
5681 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5682 return 0;
5683 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5684 return 0;
5685 return INTVAL (XEXP (SET_SRC (pat), 1));
5686 }
5687
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of exactly one $sp adjustment at
   the start, a matching inverse adjustment at the end, and no other
   use of $sp or any call in between, delete both adjustments.  */
static void
mep_reorg_noframe (rtx_insn *insns)
{
  rtx_insn *start_frame_insn;
  rtx_insn *end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body.  The final real insn (the return) is deliberately
     not examined: the loop stops when there is no insn after it.  */
  while (insns)
    {
      rtx_insn *next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Only one frame-teardown adjustment is allowed, and it
	     must exactly undo the initial adjustment.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	/* Any other reference to $sp means the frame is used.  */
	return;
      else if (CALL_P (insns))
	/* Be conservative in the presence of calls.  */
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5742
/* The machine-dependent reorg pass: run each of the MeP-specific
   reorganizations in a safe order.  */
static void
mep_reorg (void)
{
  rtx_insn *insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register; in interrupt handlers it is only
     safe when that register is saved.  Profiling inserts code that
     would break the erepeat body, so skip it then too.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5774
5775
5776
5777 /*----------------------------------------------------------------------*/
5778 /* Builtins */
5779 /*----------------------------------------------------------------------*/
5780
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Maps a -mconfig= configuration name to its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null name.  Entries
   come from COPROC_SELECTION_TABLE when the backend provides one.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5808
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  A later entry for the same
     intrinsic overrides the earlier one, chaining back to it, so
     mep_intrinsic_insn[] ends up holding the last matching entry.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditional assignment makes the computation
     just above dead -- core<->copro moves are always assumed to be
     available.  Presumably a deliberate override; confirm before
     removing either statement.  */
  mep_have_core_copro_moves_p = 1;
}
5850
/* Declare all available intrinsic functions.  Called once only.  */

/* Tree type nodes for the coprocessor builtin types, created by
   mep_init_builtins and mapped from cgen regnum operand types by
   mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5861
5862 static tree
mep_cgen_regnum_to_type(enum cgen_regnum_operand_type cr)5863 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5864 {
5865 switch (cr)
5866 {
5867 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5868 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5869 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5870 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5871 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5872 case cgen_regnum_operand_type_CHAR: return char_type_node;
5873 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5874 case cgen_regnum_operand_type_SI: return intSI_type_node;
5875 case cgen_regnum_operand_type_DI: return intDI_type_node;
5876 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5877 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5878 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5879 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5880 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5881 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5882 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5883 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5884 default:
5885 return void_type_node;
5886 }
5887 }
5888
/* Create the builtin types and declare the builtin functions for the
   coprocessor intrinsics selected by the current configuration.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The data-bus integer type is 64 bits when the coprocessor
     registers are 64 bits wide, 32 bits otherwise.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip consecutive duplicate entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
5955
/* Report the unavailability of the given intrinsic, at most once per
   intrinsic per compilation.  */

#if 1
static void
mep_intrinsic_unavailable (int intrinsic)
{
  /* Remember which intrinsics have already been diagnosed so the same
     error is not repeated for every call site.  */
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

  if (already_reported_p[intrinsic])
    return;

  /* Distinguish "not in this configuration at all" from "not in this
     (non-)VLIW context".  */
  if (mep_intrinsic_insn[intrinsic] < 0)
    error ("coprocessor intrinsic %qs is not available in this configuration",
	   cgen_intrinsics[intrinsic]);
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
    error ("%qs is not available in VLIW functions",
	   cgen_intrinsics[intrinsic]);
  else
    error ("%qs is not available in non-VLIW functions",
	   cgen_intrinsics[intrinsic]);

  already_reported_p[intrinsic] = 1;
}
#endif
5980
5981
5982 /* See if any implementation of INTRINSIC is available to the
5983 current function. If so, store the most general implementation
5984 in *INSN_PTR and return true. Return false otherwise. */
5985
5986 static bool
mep_get_intrinsic_insn(int intrinsic ATTRIBUTE_UNUSED,const struct cgen_insn ** insn_ptr ATTRIBUTE_UNUSED)5987 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
5988 {
5989 int i;
5990
5991 i = mep_intrinsic_insn[intrinsic];
5992 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
5993 i = mep_intrinsic_chain[i];
5994
5995 if (i >= 0)
5996 {
5997 *insn_ptr = &cgen_insns[i];
5998 return true;
5999 }
6000 return false;
6001 }
6002
6003
6004 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6005 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6006 try using a work-alike instead. In this case, the returned insn
6007 may have three operands rather than two. */
6008
6009 static bool
mep_get_move_insn(int intrinsic,const struct cgen_insn ** cgen_insn)6010 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6011 {
6012 size_t i;
6013
6014 if (intrinsic == mep_cmov)
6015 {
6016 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6017 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6018 return true;
6019 return false;
6020 }
6021 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6022 }
6023
6024
6025 /* If ARG is a register operand that is the same size as MODE, convert it
6026 to MODE using a subreg. Otherwise return ARG as-is. */
6027
6028 static rtx
mep_convert_arg(machine_mode mode,rtx arg)6029 mep_convert_arg (machine_mode mode, rtx arg)
6030 {
6031 if (GET_MODE (arg) != mode
6032 && register_operand (arg, VOIDmode)
6033 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6034 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6035 return arg;
6036 }
6037
6038
6039 /* Apply regnum conversions to ARG using the description given by REGNUM.
6040 Return the new argument on success and null on failure. */
6041
6042 static rtx
mep_convert_regnum(const struct cgen_regnum_operand * regnum,rtx arg)6043 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6044 {
6045 if (regnum->count == 0)
6046 return arg;
6047
6048 if (GET_CODE (arg) != CONST_INT
6049 || INTVAL (arg) < 0
6050 || INTVAL (arg) >= regnum->count)
6051 return 0;
6052
6053 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6054 }
6055
6056
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.  Returns
   the legitimized rtx, or 0 if no match could be produced.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* Nothing worked; the caller reports the incompatibility.  */
  return 0;
}
6114
6115
6116 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6117 function FNNAME. OPERAND describes the operand to which ARGNUM
6118 is mapped. */
6119
6120 static void
mep_incompatible_arg(const struct insn_operand_data * operand,rtx arg,int argnum,tree fnname)6121 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6122 int argnum, tree fnname)
6123 {
6124 size_t i;
6125
6126 if (GET_CODE (arg) == CONST_INT)
6127 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6128 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6129 {
6130 const struct cgen_immediate_predicate *predicate;
6131 HOST_WIDE_INT argval;
6132
6133 predicate = &cgen_immediate_predicates[i];
6134 argval = INTVAL (arg);
6135 if (argval < predicate->lower || argval >= predicate->upper)
6136 error ("argument %d of %qE must be in the range %d...%d",
6137 argnum, fnname, predicate->lower, predicate->upper - 1);
6138 else
6139 error ("argument %d of %qE must be a multiple of %d",
6140 argnum, fnname, predicate->align);
6141 return;
6142 }
6143
6144 error ("incompatible type for argument %d of %qE", argnum, fnname);
6145 }
6146
/* Expand a call EXP to a MeP coprocessor builtin.  TARGET is a
   suggested location for the result.  Returns the result rtx, or
   NULL_RTX after diagnosing an error.  */
static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn produces a coprocessor return value, operand 0 is
     the result rather than a user-supplied argument, so slot 0 of
     arg[] is reserved and the user-visible argument count shrinks.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      /* Map small integer constants onto hard registers where the
	 operand description asks for one; yields 0 if out of range.  */
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Bind the reserved result slot(s) to TARGET when its mode fits,
     otherwise to a fresh pseudo.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  /* If the result ended up somewhere other than TARGET, copy it over.  */
  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6326
/* Vector-mode support hook: MeP advertises no native vector modes to
   the middle end, so this always returns false.  */
static bool
mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6332
6333 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6334 a global register. */
6335
6336 static bool
global_reg_mentioned_p_1(const_rtx x)6337 global_reg_mentioned_p_1 (const_rtx x)
6338 {
6339 int regno;
6340
6341 switch (GET_CODE (x))
6342 {
6343 case SUBREG:
6344 if (REG_P (SUBREG_REG (x)))
6345 {
6346 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6347 && global_regs[subreg_regno (x)])
6348 return true;
6349 return false;
6350 }
6351 break;
6352
6353 case REG:
6354 regno = REGNO (x);
6355 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6356 return true;
6357 return false;
6358
6359 case CALL:
6360 /* A non-constant call might use a global register. */
6361 return true;
6362
6363 default:
6364 break;
6365 }
6366
6367 return false;
6368 }
6369
/* Returns nonzero if X mentions a global register.  For insns, the
   call usage (or, for const/pure calls, the function-usage list) or
   the pattern is scanned rather than the insn rtx itself.  */

static bool
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
	{
	  /* Non-const, non-pure calls are conservatively assumed to
	     touch global registers.  */
	  if (! RTL_CONST_OR_PURE_CALL_P (x))
	    return true;
	  x = CALL_INSN_FUNCTION_USAGE (x);
	  if (x == 0)
	    return false;
	}
      else
	x = PATTERN (x);
    }

  /* Walk every sub-rtx of X looking for a global-register mention.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    if (global_reg_mentioned_p_1 (*iter))
      return true;
  return false;
}
6395 /* Scheduling hooks for VLIW mode.
6396
6397 Conceptually this is very simple: we have a two-pack architecture
6398 that takes one core insn and one coprocessor insn to make up either
6399 a 32- or 64-bit instruction word (depending on the option bit set in
6400 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6401 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6402 and one 48-bit cop insn or two 32-bit core/cop insns.
6403
6404 In practice, instruction selection will be a bear. Consider in
6405 VL64 mode the following insns
6406
6407 add $1, 1
6408 cmov $cr0, $0
6409
6410 these cannot pack, since the add is a 16-bit core insn and cmov
6411 is a 32-bit cop insn. However,
6412
6413 add3 $1, $1, 1
6414 cmov $cr0, $0
6415
6416 packs just fine. For good VLIW code generation in VL64 mode, we
6417 will have to have 32-bit alternatives for many of the common core
6418 insns. Not implemented. */
6419
/* Scheduler cost-adjustment hook: return the cost of the dependence
   LINK between INSN and DEP_INSN, starting from the default COST.  */
static int
mep_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn, int cost)
{
  int cost_specified;

  /* A nonzero REG_NOTE_KIND means an anti or output dependence.  */
  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.  We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6466
6467 /* ??? We don't properly compute the length of a load/store insn,
6468 taking into account the addressing mode. */
6469
6470 static int
mep_issue_rate(void)6471 mep_issue_rate (void)
6472 {
6473 return TARGET_IVC2 ? 3 : 2;
6474 }
6475
6476 /* Return true if function DECL was declared with the vliw attribute. */
6477
6478 bool
mep_vliw_function_p(tree decl)6479 mep_vliw_function_p (tree decl)
6480 {
6481 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6482 }
6483
6484 static rtx_insn *
mep_find_ready_insn(rtx_insn ** ready,int nready,enum attr_slot slot,int length)6485 mep_find_ready_insn (rtx_insn **ready, int nready, enum attr_slot slot,
6486 int length)
6487 {
6488 int i;
6489
6490 for (i = nready - 1; i >= 0; --i)
6491 {
6492 rtx_insn *insn = ready[i];
6493 if (recog_memoized (insn) >= 0
6494 && get_attr_slot (insn) == slot
6495 && get_attr_length (insn) == length)
6496 return insn;
6497 }
6498
6499 return NULL;
6500 }
6501
6502 static void
mep_move_ready_insn(rtx_insn ** ready,int nready,rtx_insn * insn)6503 mep_move_ready_insn (rtx_insn **ready, int nready, rtx_insn *insn)
6504 {
6505 int i;
6506
6507 for (i = 0; i < nready; ++i)
6508 if (ready[i] == insn)
6509 {
6510 for (; i < nready - 1; ++i)
6511 ready[i] = ready[i + 1];
6512 ready[i] = insn;
6513 return;
6514 }
6515
6516 gcc_unreachable ();
6517 }
6518
6519 static void
mep_print_sched_insn(FILE * dump,rtx_insn * insn)6520 mep_print_sched_insn (FILE *dump, rtx_insn *insn)
6521 {
6522 const char *slots = "none";
6523 const char *name = NULL;
6524 int code;
6525 char buf[30];
6526
6527 if (GET_CODE (PATTERN (insn)) == SET
6528 || GET_CODE (PATTERN (insn)) == PARALLEL)
6529 {
6530 switch (get_attr_slots (insn))
6531 {
6532 case SLOTS_CORE: slots = "core"; break;
6533 case SLOTS_C3: slots = "c3"; break;
6534 case SLOTS_P0: slots = "p0"; break;
6535 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6536 case SLOTS_P0_P1: slots = "p0,p1"; break;
6537 case SLOTS_P0S: slots = "p0s"; break;
6538 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6539 case SLOTS_P1: slots = "p1"; break;
6540 default:
6541 sprintf(buf, "%d", get_attr_slots (insn));
6542 slots = buf;
6543 break;
6544 }
6545 }
6546 if (GET_CODE (PATTERN (insn)) == USE)
6547 slots = "use";
6548
6549 code = INSN_CODE (insn);
6550 if (code >= 0)
6551 name = get_insn_name (code);
6552 if (!name)
6553 name = "{unknown}";
6554
6555 fprintf (dump,
6556 "insn %4d %4d %8s %s\n",
6557 code,
6558 INSN_UID (insn),
6559 name,
6560 slots);
6561 }
6562
6563 static int
mep_sched_reorder(FILE * dump ATTRIBUTE_UNUSED,int sched_verbose ATTRIBUTE_UNUSED,rtx_insn ** ready,int * pnready,int clock ATTRIBUTE_UNUSED)6564 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6565 int sched_verbose ATTRIBUTE_UNUSED, rtx_insn **ready,
6566 int *pnready, int clock ATTRIBUTE_UNUSED)
6567 {
6568 int nready = *pnready;
6569 rtx_insn *core_insn, *cop_insn;
6570 int i;
6571
6572 if (dump && sched_verbose > 1)
6573 {
6574 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6575 for (i=0; i<nready; i++)
6576 mep_print_sched_insn (dump, ready[i]);
6577 fprintf (dump, "\n");
6578 }
6579
6580 if (!mep_vliw_function_p (cfun->decl))
6581 return 1;
6582 if (nready < 2)
6583 return 1;
6584
6585 /* IVC2 uses a DFA to determine what's ready and what's not. */
6586 if (TARGET_IVC2)
6587 return nready;
6588
6589 /* We can issue either a core or coprocessor instruction.
6590 Look for a matched pair of insns to reorder. If we don't
6591 find any, don't second-guess the scheduler's priorities. */
6592
6593 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6594 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6595 TARGET_OPT_VL64 ? 6 : 2)))
6596 ;
6597 else if (TARGET_OPT_VL64
6598 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6599 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6600 ;
6601 else
6602 /* We didn't find a pair. Issue the single insn at the head
6603 of the ready list. */
6604 return 1;
6605
6606 /* Reorder the two insns first. */
6607 mep_move_ready_insn (ready, nready, core_insn);
6608 mep_move_ready_insn (ready, nready - 1, cop_insn);
6609 return 2;
6610 }
6611
6612 /* Return true if X contains a register that is set by insn PREV. */
6613
6614 static bool
mep_store_find_set(const_rtx x,const rtx_insn * prev)6615 mep_store_find_set (const_rtx x, const rtx_insn *prev)
6616 {
6617 subrtx_iterator::array_type array;
6618 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
6619 if (REG_P (x) && reg_set_p (x, prev))
6620 return true;
6621 return false;
6622 }
6623
/* Like mep_store_data_bypass_p, but takes a pattern as the second
   argument, not the containing insn.  Returns true if PAT is a store
   whose address has no true dependence on PREV.  */

static bool
mep_store_data_bypass_1 (rtx_insn *prev, rtx pat)
{
  /* Cope with intrinsics like swcpa.  */
  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      /* A PARALLEL bypasses if any of its elements does.  */
      /* NOTE(review): XVECEXP (pat, 0, i) is a pattern element, not an
	 insn, so the as_a <rtx_insn *> cast looks dubious — confirm
	 whether this path is ever exercised.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (mep_store_data_bypass_p (prev,
				     as_a <rtx_insn *> (XVECEXP (pat, 0, i))))
	  return true;

      return false;
    }

  /* Check for some sort of store.  */
  if (GET_CODE (pat) != SET
      || GET_CODE (SET_DEST (pat)) != MEM)
    return false;

  /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
     The first operand to the unspec is the store data and the other operands
     are used to calculate the address.  */
  if (GET_CODE (SET_SRC (pat)) == UNSPEC)
    {
      rtx src;
      int i;

      src = SET_SRC (pat);
      /* Skip operand 0 (the store data); only the address operands
	 may not be set by PREV.  */
      for (i = 1; i < XVECLEN (src, 0); i++)
	if (mep_store_find_set (XVECEXP (src, 0, i), prev))
	  return false;

      return true;
    }

  /* Otherwise just check that PREV doesn't modify any register mentioned
     in the memory destination.  */
  return !mep_store_find_set (SET_DEST (pat), prev);
}
6668
6669 /* Return true if INSN is a store instruction and if the store address
6670 has no true dependence on PREV. */
6671
6672 bool
mep_store_data_bypass_p(rtx_insn * prev,rtx_insn * insn)6673 mep_store_data_bypass_p (rtx_insn *prev, rtx_insn *insn)
6674 {
6675 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6676 }
6677
6678 /* Return true if, apart from HI/LO, there are no true dependencies
6679 between multiplication instructions PREV and INSN. */
6680
6681 bool
mep_mul_hilo_bypass_p(rtx_insn * prev,rtx_insn * insn)6682 mep_mul_hilo_bypass_p (rtx_insn *prev, rtx_insn *insn)
6683 {
6684 rtx pat;
6685
6686 pat = PATTERN (insn);
6687 if (GET_CODE (pat) == PARALLEL)
6688 pat = XVECEXP (pat, 0, 0);
6689 if (GET_CODE (pat) != SET)
6690 return false;
6691 subrtx_iterator::array_type array;
6692 FOR_EACH_SUBRTX (iter, array, SET_SRC (pat), NONCONST)
6693 {
6694 const_rtx x = *iter;
6695 if (REG_P (x)
6696 && REGNO (x) != LO_REGNO
6697 && REGNO (x) != HI_REGNO
6698 && reg_set_p (x, prev))
6699 return false;
6700 }
6701 return true;
6702 }
6703
6704 /* Return true if INSN is an ldc instruction that issues to the
6705 MeP-h1 integer pipeline. This is true for instructions that
6706 read from PSW, LP, SAR, HI and LO. */
6707
6708 bool
mep_ipipe_ldc_p(rtx_insn * insn)6709 mep_ipipe_ldc_p (rtx_insn *insn)
6710 {
6711 rtx pat, src;
6712
6713 pat = PATTERN (insn);
6714
6715 /* Cope with instrinsics that set both a hard register and its shadow.
6716 The set of the hard register comes first. */
6717 if (GET_CODE (pat) == PARALLEL)
6718 pat = XVECEXP (pat, 0, 0);
6719
6720 if (GET_CODE (pat) == SET)
6721 {
6722 src = SET_SRC (pat);
6723
6724 /* Cope with intrinsics. The first operand to the unspec is
6725 the source register. */
6726 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6727 src = XVECEXP (src, 0, 0);
6728
6729 if (REG_P (src))
6730 switch (REGNO (src))
6731 {
6732 case PSW_REGNO:
6733 case LP_REGNO:
6734 case SAR_REGNO:
6735 case HI_REGNO:
6736 case LO_REGNO:
6737 return true;
6738 }
6739 }
6740 return false;
6741 }
6742
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx_insn *
mep_make_bundle (rtx core_insn_or_pat, rtx_insn *cop)
{
  rtx seq;
  rtx_insn *core_insn;
  rtx_insn *insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core_insn_or_pat))
    {
      core_insn = as_a <rtx_insn *> (core_insn_or_pat);
      remove_insn (core_insn);
    }
  else
    core_insn = make_insn_raw (core_insn_or_pat);

  /* Generate the bundle sequence and replace COP with it.  */
  seq = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core_insn, cop));
  insn = emit_insn_after (seq, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The inner
     insns chain to each other and to the SEQUENCE's neighbors.  */
  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
  SET_NEXT_INSN (core_insn) = cop;
  SET_PREV_INSN (cop) = core_insn;
  SET_NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core_insn, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE_INSN and COP.  Prefer CORE_INSN's location, falling
     back to COP's.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core_insn) ? core_insn : cop);
  INSN_LOCATION (core_insn) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6790
/* A helper routine for mep_insn_dependent_p called through note_stores.  */
6792
6793 static void
mep_insn_dependent_p_1(rtx x,const_rtx pat ATTRIBUTE_UNUSED,void * data)6794 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6795 {
6796 rtx * pinsn = (rtx *) data;
6797
6798 if (*pinsn && reg_mentioned_p (x, *pinsn))
6799 *pinsn = NULL_RTX;
6800 }
6801
6802 /* Return true if anything in insn X is (anti,output,true) dependent on
6803 anything in insn Y. */
6804
6805 static int
mep_insn_dependent_p(rtx x,rtx y)6806 mep_insn_dependent_p (rtx x, rtx y)
6807 {
6808 rtx tmp;
6809
6810 gcc_assert (INSN_P (x));
6811 gcc_assert (INSN_P (y));
6812
6813 tmp = PATTERN (y);
6814 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6815 if (tmp == NULL_RTX)
6816 return 1;
6817
6818 tmp = PATTERN (x);
6819 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6820 if (tmp == NULL_RTX)
6821 return 1;
6822
6823 return 0;
6824 }
6825
6826 static int
core_insn_p(rtx_insn * insn)6827 core_insn_p (rtx_insn *insn)
6828 {
6829 if (GET_CODE (PATTERN (insn)) == USE)
6830 return 0;
6831 if (get_attr_slot (insn) == SLOT_CORE)
6832 return 1;
6833 return 0;
6834 }
6835
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx_insn *insns)
{
  rtx_insn *insn, *last = NULL, *first = NULL;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx_insn *note, *prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, unlinking each NOTE
	     and re-linking it just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here...  */
		  SET_PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  SET_NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  SET_NEXT_INSN (note) = first;
		  SET_PREV_INSN (note) = PREV_INSN (first);
		  SET_NEXT_INSN (PREV_INSN (note)) = note;
		  SET_PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx_insn *core_insn = NULL;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* LAST scans to the end of this bundle, remembering the
		 last core insn seen (if any).  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    SET_PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  SET_PREV_INSN (core_insn) = PREV_INSN (insn);
		  SET_NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    SET_NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  SET_PREV_INSN (insn) = core_insn;

		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      /* Non-IVC2 path: clear the scheduler's group marker; bundling
	 below re-marks COP insns via mep_make_bundle.  */
      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  if (JUMP_P (insn)
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      /* COP insn cannot pair with LAST; pad it with a core
		 nop of the appropriate width instead.  */
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7029
7030
7031 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7032 Return true on success. This function can fail if the intrinsic
7033 is unavailable or if the operands don't satisfy their predicates. */
7034
7035 bool
mep_emit_intrinsic(int intrinsic,const rtx * operands)7036 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7037 {
7038 const struct cgen_insn *cgen_insn;
7039 const struct insn_data_d *idata;
7040 rtx newop[10];
7041 int i;
7042
7043 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7044 return false;
7045
7046 idata = &insn_data[cgen_insn->icode];
7047 for (i = 0; i < idata->n_operands; i++)
7048 {
7049 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7050 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7051 return false;
7052 }
7053
7054 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7055 newop[3], newop[4], newop[5],
7056 newop[6], newop[7], newop[8]));
7057
7058 return true;
7059 }
7060
7061
7062 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7063 OPERANDS[0]. Report an error if the instruction could not
7064 be synthesized. OPERANDS[1] is a register_operand. For sign
7065 and zero extensions, it may be smaller than SImode. */
7066
7067 bool
mep_expand_unary_intrinsic(int ATTRIBUTE_UNUSED intrinsic,rtx * operands ATTRIBUTE_UNUSED)7068 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7069 rtx * operands ATTRIBUTE_UNUSED)
7070 {
7071 return false;
7072 }
7073
7074
7075 /* Likewise, but apply a binary operation to OPERANDS[1] and
7076 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7077 can be a general_operand.
7078
7079 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7080 third operand. REG and REG3 take register operands only. */
7081
7082 bool
mep_expand_binary_intrinsic(int ATTRIBUTE_UNUSED immediate,int ATTRIBUTE_UNUSED immediate3,int ATTRIBUTE_UNUSED reg,int ATTRIBUTE_UNUSED reg3,rtx * operands ATTRIBUTE_UNUSED)7083 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7084 int ATTRIBUTE_UNUSED immediate3,
7085 int ATTRIBUTE_UNUSED reg,
7086 int ATTRIBUTE_UNUSED reg3,
7087 rtx * operands ATTRIBUTE_UNUSED)
7088 {
7089 return false;
7090 }
7091
7092 static bool
mep_rtx_cost(rtx x,machine_mode mode ATTRIBUTE_UNUSED,int outer_code ATTRIBUTE_UNUSED,int opno ATTRIBUTE_UNUSED,int * total,bool ATTRIBUTE_UNUSED speed_t)7093 mep_rtx_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
7094 int outer_code ATTRIBUTE_UNUSED,
7095 int opno ATTRIBUTE_UNUSED, int *total,
7096 bool ATTRIBUTE_UNUSED speed_t)
7097 {
7098 int code = GET_CODE (x);
7099
7100 switch (code)
7101 {
7102 case CONST_INT:
7103 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7104 *total = 0;
7105 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7106 *total = 1;
7107 else
7108 *total = 3;
7109 return true;
7110
7111 case SYMBOL_REF:
7112 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7113 return true;
7114
7115 case MULT:
7116 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7117 ? COSTS_N_INSNS (3)
7118 : COSTS_N_INSNS (2));
7119 return true;
7120 }
7121 return false;
7122 }
7123
7124 static int
mep_address_cost(rtx addr ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool ATTRIBUTE_UNUSED speed_p)7125 mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
7126 machine_mode mode ATTRIBUTE_UNUSED,
7127 addr_space_t as ATTRIBUTE_UNUSED,
7128 bool ATTRIBUTE_UNUSED speed_p)
7129 {
7130 return 1;
7131 }
7132
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections used by the section-selection hooks above.  */

static void
mep_asm_init_sections (void)
{
  /* Writable data sections.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Zero-initialized (BSS-style) sections.  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Read-only data sections (no SECTION_WRITE).  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW code sections; the trailing .vliw directive switches the
     assembler into VLIW mode for these sections.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Core-mode code section (.core directive).  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7177
/* Initialize the GCC target structure.  Each #undef/#define pair below
   installs a MeP-specific implementation of a target hook; the hooks'
   contracts are documented in target.def / the GCC internals manual.  */

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mep_attribute_table
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
#undef TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P mep_can_inline_p
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION mep_asm_named_section
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS mep_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Scheduling hooks (defined earlier in this file).  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST mep_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE mep_issue_rate
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER mep_sched_reorder
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION mep_select_section
#undef TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION mep_unique_section
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
/* Cost hooks (defined earlier in this file).  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS mep_rtx_cost
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST mep_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG mep_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE mep_option_override
#undef TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY mep_return_in_memory
#undef TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE mep_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT mep_trampoline_init
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* The target vector consumed by the rest of the compiler.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots generated from this file.  */
#include "gt-mep.h"
7264