1 /* Definitions for Toshiba Media Processor
2    Copyright (C) 2001-2013 Free Software Foundation, Inc.
3    Contributed by Red Hat, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "except.h"
40 #include "function.h"
41 #include "optabs.h"
42 #include "reload.h"
43 #include "tm_p.h"
44 #include "ggc.h"
45 #include "diagnostic-core.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "langhooks.h"
49 #include "df.h"
50 #include "gimple.h"
51 #include "opts.h"
52 #include "dumpfile.h"
53 
54 /* Structure of this file:
55 
56  + Command Line Option Support
57  + Pattern support - constraints, predicates, expanders
58  + Reload Support
59  + Costs
60  + Functions to save and restore machine-specific function data.
61  + Frame/Epilog/Prolog Related
62  + Operand Printing
63  + Function args in registers
64  + Handle pipeline hazards
65  + Handle attributes
66  + Trampolines
67  + Machine-dependent Reorg
68  + Builtins.  */
69 
70 /* Symbol encodings:
71 
72    Symbols are encoded as @ <char> . <name> where <char> is one of these:
73 
74    b - based
75    t - tiny
76    n - near
77    f - far
78    i - io, near
79    I - io, far
80    c - cb (control bus)  */
81 
/* Per-function machine-dependent state, allocated by
   mep_init_machine_status and kept alive across passes via GTY.  */
struct GTY(()) machine_function
{
  /* Cached frame-pointer-needed decision for this function.  */
  int mep_frame_pointer_needed;

  /* For varargs. */
  int arg_regs_to_save;		/* Count of argument registers spilled.  */
  int regsave_filler;		/* Padding inserted in the regsave area.  */
  int frame_filler;		/* Padding inserted in the frame.  */
  int frame_locked;		/* Nonzero once the layout may no longer change.  */

  /* Records __builtin_return address.  NOTE(review): the field name
     suggests this holds an EH stack adjustment rtx — confirm against
     the epilogue code.  */
  rtx eh_stack_adjust;

  /* Size of the register-save area and per-register slot offsets.  */
  int reg_save_size;
  int reg_save_slot[FIRST_PSEUDO_REGISTER];
  /* Nonzero for each hard register saved in the prologue.  */
  unsigned char reg_saved[FIRST_PSEUDO_REGISTER];

  /* 2 if the current function has an interrupt attribute, 1 if not, 0
     if unknown.  This is here because resource.c uses EPILOGUE_USES
     which needs it.  */
  int interrupt_handler;

  /* Likewise, for disinterrupt attribute.  */
  int disable_interrupts;

  /* Number of doloop tags used so far.  */
  int doloop_tags;

  /* True if the last tag was allocated to a doloop_end.  */
  bool doloop_tag_from_end;

  /* True if reload changes $TP.  */
  bool reload_changes_tp;

  /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
     We only set this if the function is an interrupt handler.  */
  int asms_without_operands;
};
120 
/* True if rtx X is a hard REG for one of the control registers.  */
#define MEP_CONTROL_REG(x) \
  (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))

/* Output sections for the based/tiny/near/far data placement model.
   Presumably created in mep_asm_init_sections (declared below) —
   confirm there.  */
static GTY(()) section * based_section;
static GTY(()) section * tinybss_section;
static GTY(()) section * far_section;
static GTY(()) section * farbss_section;
static GTY(()) section * frodata_section;
static GTY(()) section * srodata_section;

/* Text sections for VLIW and far code.  */
static GTY(()) section * vtext_section;
static GTY(()) section * vftext_section;
static GTY(()) section * ftext_section;
134 
135 static void mep_set_leaf_registers (int);
136 static bool symbol_p (rtx);
137 static bool symbolref_p (rtx);
138 static void encode_pattern_1 (rtx);
139 static void encode_pattern (rtx);
140 static bool const_in_range (rtx, int, int);
141 static void mep_rewrite_mult (rtx, rtx);
142 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
143 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
144 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
145 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
146 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
147 static bool mep_nongeneral_reg (rtx);
148 static bool mep_general_copro_reg (rtx);
149 static bool mep_nonregister (rtx);
150 static struct machine_function* mep_init_machine_status (void);
151 static rtx mep_tp_rtx (void);
152 static rtx mep_gp_rtx (void);
153 static bool mep_interrupt_p (void);
154 static bool mep_disinterrupt_p (void);
155 static bool mep_reg_set_p (rtx, rtx);
156 static bool mep_reg_set_in_function (int);
157 static bool mep_interrupt_saved_reg (int);
158 static bool mep_call_saves_register (int);
159 static rtx F (rtx);
160 static void add_constant (int, int, int, int);
161 static rtx maybe_dead_move (rtx, rtx, bool);
162 static void mep_reload_pointer (int, const char *);
163 static void mep_start_function (FILE *, HOST_WIDE_INT);
164 static bool mep_function_ok_for_sibcall (tree, tree);
165 static int unique_bit_in (HOST_WIDE_INT);
166 static int bit_size_for_clip (HOST_WIDE_INT);
167 static int bytesize (const_tree, enum machine_mode);
168 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
169 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
170 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
171 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
172 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
173 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
174 static bool mep_function_attribute_inlinable_p (const_tree);
175 static bool mep_can_inline_p (tree, tree);
176 static bool mep_lookup_pragma_disinterrupt (const char *);
177 static int mep_multiple_address_regions (tree, bool);
178 static int mep_attrlist_to_encoding (tree, tree);
179 static void mep_insert_attributes (tree, tree *);
180 static void mep_encode_section_info (tree, rtx, int);
181 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
182 static void mep_unique_section (tree, int);
183 static unsigned int mep_section_type_flags (tree, const char *, int);
184 static void mep_asm_named_section (const char *, unsigned int, tree);
185 static bool mep_mentioned_p (rtx, rtx, int);
186 static void mep_reorg_regmove (rtx);
187 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
188 static void mep_reorg_repeat (rtx);
189 static bool mep_invertable_branch_p (rtx);
190 static void mep_invert_branch (rtx, rtx);
191 static void mep_reorg_erepeat (rtx);
192 static void mep_jmp_return_reorg (rtx);
193 static void mep_reorg_addcombine (rtx);
194 static void mep_reorg (void);
195 static void mep_init_intrinsics (void);
196 static void mep_init_builtins (void);
197 static void mep_intrinsic_unavailable (int);
198 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
199 static bool mep_get_move_insn (int, const struct cgen_insn **);
200 static rtx mep_convert_arg (enum machine_mode, rtx);
201 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
202 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
203 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
204 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
205 static int mep_adjust_cost (rtx, rtx, rtx, int);
206 static int mep_issue_rate (void);
207 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
208 static void mep_move_ready_insn (rtx *, int, rtx);
209 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
210 static rtx mep_make_bundle (rtx, rtx);
211 static void mep_bundle_insns (rtx);
212 static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
213 static int mep_address_cost (rtx, enum machine_mode, addr_space_t, bool);
214 static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
215 					tree, int *, int);
216 static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
217 				   const_tree, bool);
218 static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
219 			     const_tree, bool);
220 static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
221 				      const_tree, bool);
222 static bool mep_vector_mode_supported_p (enum machine_mode);
223 static rtx  mep_allocate_initial_value (rtx);
224 static void mep_asm_init_sections (void);
225 static int mep_comp_type_attributes (const_tree, const_tree);
226 static bool mep_narrow_volatile_bitfield (void);
227 static rtx mep_expand_builtin_saveregs (void);
228 static tree mep_build_builtin_va_list (void);
229 static void mep_expand_va_start (tree, rtx);
230 static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
231 static bool mep_can_eliminate (const int, const int);
232 static void mep_conditional_register_usage (void);
233 static void mep_trampoline_init (rtx, tree, rtx);
234 
235 #define WANT_GCC_DEFINITIONS
236 #include "mep-intrin.h"
237 #undef WANT_GCC_DEFINITIONS
238 
239 
/* Command Line Option Support.  */

/* Per-hard-register leaf flags; every entry holds the same value,
   maintained by mep_set_leaf_registers.  */
char mep_leaf_registers [FIRST_PSEUDO_REGISTER];

/* True if we can use cmov instructions to move values back and forth
   between core and coprocessor registers.  */
bool mep_have_core_copro_moves_p;

/* True if we can use cmov instructions (or a work-alike) to move
   values between coprocessor registers.  */
bool mep_have_copro_copro_moves_p;

/* A table of all coprocessor instructions that can act like
   a coprocessor-to-coprocessor cmov.  */
static const int mep_cmov_insns[] = {
  mep_cmov,
  mep_cpmov,
  mep_fmovs,
  mep_caddi3,
  mep_csubi3,
  mep_candi3,
  mep_cori3,
  mep_cxori3,
  mep_cand3,
  mep_cor3
};
266 
267 
268 static void
mep_set_leaf_registers(int enable)269 mep_set_leaf_registers (int enable)
270 {
271   int i;
272 
273   if (mep_leaf_registers[0] != enable)
274     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
275       mep_leaf_registers[i] = enable;
276 }
277 
278 static void
mep_conditional_register_usage(void)279 mep_conditional_register_usage (void)
280 {
281   int i;
282 
283   if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
284     {
285       fixed_regs[HI_REGNO] = 1;
286       fixed_regs[LO_REGNO] = 1;
287       call_used_regs[HI_REGNO] = 1;
288       call_used_regs[LO_REGNO] = 1;
289     }
290 
291   for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
292     global_regs[i] = 1;
293 }
294 
/* Implement TARGET_OPTION_OVERRIDE.  Process deferred command-line
   options (-mivc2), diagnose unsupported option combinations, and set
   the defaults that depend on the -ms/-mm/-ml memory model.  */

static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mivc2:
	    /* -mivc2: make coprocessor hard registers 48..79
	       allocatable and call-clobbered, except registers 54/55
	       (j == 6, 7), which stay call-saved.  */
	    for (j = 0; j < 32; j++)
	      fixed_regs[j + 48] = 0;
	    for (j = 0; j < 32; j++)
	      call_used_regs[j + 48] = 1;
	    for (j = 6; j < 8; j++)
	      call_used_regs[j + 48] = 0;

	    /* Rename the coprocessor control registers to their IVC2
	       names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	    RN (0, "$csar0");
	    RN (1, "$cc");
	    RN (4, "$cofr0");
	    RN (5, "$cofr1");
	    RN (6, "$cofa0");
	    RN (7, "$cofa1");
	    RN (15, "$csar1");

	    RN (16, "$acc0_0");
	    RN (17, "$acc0_1");
	    RN (18, "$acc0_2");
	    RN (19, "$acc0_3");
	    RN (20, "$acc0_4");
	    RN (21, "$acc0_5");
	    RN (22, "$acc0_6");
	    RN (23, "$acc0_7");

	    RN (24, "$acc1_0");
	    RN (25, "$acc1_1");
	    RN (26, "$acc1_2");
	    RN (27, "$acc1_3");
	    RN (28, "$acc1_4");
	    RN (29, "$acc1_5");
	    RN (30, "$acc1_6");
	    RN (31, "$acc1_7");
#undef RN
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* Diagnose unsupported options and invalid combinations.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Pick the tiny-data cutoff implied by the memory model, unless the
     user gave one explicitly (only honored for -ml).  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
388 
389 /* Pattern Support - constraints, predicates, expanders.  */
390 
391 /* MEP has very few instructions that can refer to the span of
392    addresses used by symbols, so it's common to check for them.  */
393 
394 static bool
symbol_p(rtx x)395 symbol_p (rtx x)
396 {
397   int c = GET_CODE (x);
398 
399   return (c == CONST_INT
400 	  || c == CONST
401 	  || c == SYMBOL_REF);
402 }
403 
404 static bool
symbolref_p(rtx x)405 symbolref_p (rtx x)
406 {
407   int c;
408 
409   if (GET_CODE (x) != MEM)
410     return false;
411 
412   c = GET_CODE (XEXP (x, 0));
413   return (c == CONST_INT
414 	  || c == CONST
415 	  || c == SYMBOL_REF);
416 }
417 
/* static const char *reg_class_names[] = REG_CLASS_NAMES; */

/* True if hard register R may serve as a general register.  When
   STRICT is zero, also accept the argument pointer and pseudos, which
   may later be allocated to general registers.  */
#define GEN_REG(R, STRICT)				\
  (GR_REGNO_P (R)					\
   || (!STRICT						\
       && ((R) == ARG_POINTER_REGNUM			\
	   || (R) >= FIRST_PSEUDO_REGISTER)))

/* Buffers filled in by encode_pattern: PATTERN holds one character
   per rtx visited, PATTERNR the corresponding rtx pointers.  */
static char pattern[12], *patternp;
static GTY(()) rtx patternr[12];
/* True if the most recently encoded pattern is exactly X.  */
#define RTX_IS(x) (strcmp (pattern, x) == 0)
429 
/* Recursive worker for encode_pattern: append a one-character code
   for X (plus its operands' codes) to the `pattern' buffer, recording
   each rtx in `patternr'.  Truncates with '?' when the buffer is
   nearly full.  */

static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; overwrite the last code with
     '?' once the buffer is exhausted.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* FALLTHRU - a MEM's address is encoded like a CONST's body.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Encode the unspec number as a digit, then each element.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
496 
497 static void
encode_pattern(rtx x)498 encode_pattern (rtx x)
499 {
500   patternp = pattern;
501   encode_pattern_1 (x);
502   *patternp = 0;
503 }
504 
/* Return the section-encoding character of symbol X (see the "Symbol
   encodings" table above), or 0 if X carries no encoding.  Looks
   through MEM, CONST, UNSPEC and PLUS-with-constant wrappers to find
   the underlying SYMBOL_REF.  */

int
mep_section_tag (rtx x)
{
  const char *name;

  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only a constant offset keeps the symbol's section known.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      /* IO symbols ('i'/'I') are reported as the near/far tags.  */
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* io, far */
	  return 'n'; /* io, near */
	}
      return name[1];
    }
  return 0;
}
546 
/* Implement REGNO_REG_CLASS: return the smallest register class
   containing hard register REGNO.  */

int
mep_regno_reg_class (int regno)
{
  /* Single-register classes first.  */
  switch (regno)
    {
    case SP_REGNO:		return SP_REGS;
    case TP_REGNO:		return TP_REGS;
    case GP_REGNO:		return GP_REGS;
    case 0: 			return R0_REGS;
    case HI_REGNO:		return HI_REGS;
    case LO_REGNO:		return LO_REGS;
    case ARG_POINTER_REGNUM:	return GENERAL_REGS;
    }

  /* The first eight general registers are addressable tp-relative.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  /* Prefer the smallest user subclass that contains REGNO;
	     return USERn_REGS itself only if no proper subclass does.  */
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Only the shadow registers remain; they are in no class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
596 
597 static bool
const_in_range(rtx x,int minv,int maxv)598 const_in_range (rtx x, int minv, int maxv)
599 {
600   return (GET_CODE (x) == CONST_INT
601 	  && INTVAL (x) >= minv
602 	  && INTVAL (x) <= maxv);
603 }
604 
/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
   such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2.  If a move
   is needed, emit it before INSN if INSN is nonnull, otherwise emit it
   at the end of the insn stream.  */

rtx
mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
{
  /* mulr reads and writes DEST, so if DEST already equals one of the
     sources, the other source is the required operand.  */
  if (rtx_equal_p (dest, src1))
    return src2;
  else if (rtx_equal_p (dest, src2))
    return src1;
  else
    {
      /* Otherwise copy SRC1 into DEST first and multiply by SRC2.  */
      if (insn == 0)
	emit_insn (gen_movsi (copy_rtx (dest), src1));
      else
	emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
      return src2;
    }
}
626 
/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
   Change the last element of PATTERN from (clobber (scratch:SI))
   to (clobber (reg:SI HI_REGNO)).  */

static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force re-recognition of the rewritten insn.  */
  INSN_CODE (insn) = -1;
}
641 
/* Subroutine of mep_reuse_lo_p.  Rewrite instruction INSN so that it
   calculates SRC1 * SRC2 and stores the result in $lo.  Also make it
   store the result in DEST if nonnull.  */

static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* mulr form: DEST and $lo both receive the product.  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* mul form: only $lo receives the product.  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
659 
/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3.  First copy
   SRC3 into $lo, then use either madd or maddr.  The move into $lo will
   be deleted by a peephole2 if SRC3 is already in $lo.  */

static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  /* Seed $lo with the addend before the multiply-accumulate.  */
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* maddr form: DEST and $lo both receive the result.  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* madd form: only $lo receives the result.  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
679 
/* Return true if $lo has the same value as integer register GPR when
   instruction INSN is reached.  If necessary, rewrite the instruction
   that sets $lo so that it uses a proper SET, not a CLOBBER.  LO is an
   rtx for (reg:SI LO_REGNO).

   This function is intended to be used by the peephole2 pass.  Since
   that pass goes from the end of a basic block to the beginning, and
   propagates liveness information on the way, there is no need to
   update register notes here.

   If GPR_DEAD_P is true on entry, and this function returns true,
   then the caller will replace _every_ use of GPR in and after INSN
   with LO.  This means that if the instruction that sets $lo is a
   mulr- or maddr-type instruction, we can rewrite it to use mul or
   madd instead.  In combination with the copy progagation pass,
   this allows us to replace sequences like:

	mov GPR,R1
	mulr GPR,R2

   with:

	mul R1,R2

   if GPR is no longer used.  */

static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Scan backwards from INSN to the start of the basic block, looking
     for the instruction that set $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR holds the product; make the multiply also (or
		   only, if GPR is dead) write $lo.  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already write $lo; reuse is fine if operand 1 is
	       the register we are tracking.  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       invalidates the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR between here and INSN means the caller may
	       not blindly replace every use of GPR with $lo.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
759 
760 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data.  */
761 
762 bool
mep_reuse_lo_p(rtx lo,rtx gpr,rtx insn,bool gpr_dead_p)763 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
764 {
765   bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
766   extract_insn (insn);
767   return result;
768 }
769 
/* Return true if SET can be turned into a post-modify load or store
   that adds OFFSET to GPR.  In other words, return true if SET can be
   changed into:

       (parallel [SET (set GPR (plus:SI GPR OFFSET))]).

   It's OK to change SET to an equivalent operation in order to
   make it match.  Note that on success this function modifies SET
   in place.  */

static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
840 
841 /* Return the effect of frame-related instruction INSN.  */
842 
843 static rtx
mep_frame_expr(rtx insn)844 mep_frame_expr (rtx insn)
845 {
846   rtx note, expr;
847 
848   note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
849   expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
850   RTX_FRAME_RELATED_P (expr) = 1;
851   return expr;
852 }
853 
/* Merge instructions INSN1 and INSN2 using a PARALLEL.  Store the
   new pattern in INSN1; INSN2 will be deleted by the caller.  */

static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* Preserve INSN2's frame-related effect (and INSN1's, if both are
     frame-related) as a REG_FRAME_RELATED_EXPR note on INSN1.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined insn.  */
  INSN_CODE (insn1) = -1;
}
877 
/* SET_INSN is an instruction that adds OFFSET to REG.  Go back through
   the basic block to see if any previous load or store instruction can
   be persuaded to do SET_INSN as a side-effect.  Return true if so.  */

static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  insn = set_insn;
  /* Walk backwards to the start of the basic block.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      /* Fold SET_INSN's addition into this load/store.  */
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any intervening set or use of REG, or a volatile insn,
	     blocks the transformation.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
908 
909 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data.  */
910 
911 bool
mep_use_post_modify_p(rtx insn,rtx reg,rtx offset)912 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
913 {
914   bool result = mep_use_post_modify_p_1 (insn, reg, offset);
915   extract_insn (insn);
916   return result;
917 }
918 
919 bool
mep_allow_clip(rtx ux,rtx lx,int s)920 mep_allow_clip (rtx ux, rtx lx, int s)
921 {
922   HOST_WIDE_INT u = INTVAL (ux);
923   HOST_WIDE_INT l = INTVAL (lx);
924   int i;
925 
926   if (!TARGET_OPT_CLIP)
927     return false;
928 
929   if (s)
930     {
931       for (i = 0; i < 30; i ++)
932 	if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
933 	    && (l == - ((HOST_WIDE_INT) 1 << i)))
934 	  return true;
935     }
936   else
937     {
938       if (l != 0)
939 	return false;
940 
941       for (i = 0; i < 30; i ++)
942 	if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
943 	  return true;
944     }
945   return false;
946 }
947 
948 bool
mep_bit_position_p(rtx x,bool looking_for)949 mep_bit_position_p (rtx x, bool looking_for)
950 {
951   if (GET_CODE (x) != CONST_INT)
952     return false;
953   switch ((int) INTVAL(x) & 0xff)
954     {
955     case 0x01: case 0x02: case 0x04: case 0x08:
956     case 0x10: case 0x20: case 0x40: case 0x80:
957       return looking_for;
958     case 0xfe: case 0xfd: case 0xfb: case 0xf7:
959     case 0xef: case 0xdf: case 0xbf: case 0x7f:
960       return !looking_for;
961     }
962   return false;
963 }
964 
/* Return true if a symbolic move from SRC to DEST cannot be done with
   a single instruction and must be split: far symbols, plus offsets
   outside [-65536, 0xffffff], and destinations other than the first
   eight core registers all need splitting.  */

static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Record the section tag before peeling wrappers off SRC.  */
  int s = mep_section_tag (src);

  /* Strip MEM and CONST wrappers until we reach the symbolic core.
     Any other code means SRC is not symbolic after all.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
993 
/* Return true if the move described by OPERANDS (dest, src) must be
   split into multiple instructions.  SYMBOLIC is nonzero when SRC is
   symbolic; otherwise SRC must be a constant integer for a split to
   be needed.  */

bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching the I, J or O constraints fit one insn.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants fit one insn too, but after reload only when the
     destination is one of the first eight core registers.  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1019 
1020 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1021    it to one specific value.  So the insn chosen depends on whether
1022    the source and destination modes match.  */
1023 
1024 bool
mep_vliw_mode_match(rtx tgt)1025 mep_vliw_mode_match (rtx tgt)
1026 {
1027   bool src_vliw = mep_vliw_function_p (cfun->decl);
1028   bool tgt_vliw = INTVAL (tgt);
1029 
1030   return src_vliw == tgt_vliw;
1031 }
1032 
1033 /* Like the above, but also test for near/far mismatches.  */
1034 
1035 bool
mep_vliw_jmp_match(rtx tgt)1036 mep_vliw_jmp_match (rtx tgt)
1037 {
1038   bool src_vliw = mep_vliw_function_p (cfun->decl);
1039   bool tgt_vliw = INTVAL (tgt);
1040 
1041   if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1042     return false;
1043 
1044   return src_vliw == tgt_vliw;
1045 }
1046 
1047 bool
mep_multi_slot(rtx x)1048 mep_multi_slot (rtx x)
1049 {
1050   return get_attr_slot (x) == SLOT_MULTI;
1051 }
1052 
1053 /* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
1054 
1055 static bool
mep_legitimate_constant_p(enum machine_mode mode ATTRIBUTE_UNUSED,rtx x)1056 mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1057 {
1058   /* We can't convert symbol values to gp- or tp-rel values after
1059      reload, as reload might have used $gp or $tp for other
1060      purposes.  */
1061   if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1062     {
1063       char e = mep_section_tag (x);
1064       return (e != 't' && e != 'b');
1065     }
1066   return 1;
1067 }
1068 
1069 /* Be careful not to use macros that need to be compiled one way for
1070    strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */
1071 
/* Target address legitimacy check.  MODE is the access mode
   (VOIDmode for call addresses), X the candidate address, and STRICT
   nonzero once pseudos no longer qualify as base registers (GEN_REG
   honours STRICT).  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- accepted only for word-or-smaller accesses.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* Plain register indirect: [reg].  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Base plus signed 16-bit displacement: [reg + const].  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Base plus an unspec (optionally with an added constant), as
     produced for tp-/gp-relative references:
     [reg + unspec] or [reg + (unspec + const)].  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section symbols are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses (checked with VOIDmode) may be bare symbols.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized accesses may use absolute constant addresses, except
     tp-/gp-relative symbols; integer addresses must additionally be
     non-negative, small and word-aligned.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1179 
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  *X is the address being
   reloaded for operand OPNUM; TYPE_I is the enum reload_type passed
   as an int.  Return 1 if a reload was pushed here that legitimizes
   the address, 0 to let the generic reload machinery handle it.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-/gp-relative symbols ('t'/'b') are left for the generic
	 code; they are handled via their base registers.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1221 
/* Return the encoded length in bytes (2 or 4) of core load/store
   INSN, whose operand OPN of its single SET is the memory reference.
   Two-byte encodings exist for plain [reg] addressing, for suitable
   SP-relative offsets, and for TP-relative accesses whose register
   operand is one of the first eight core registers.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The non-memory operand.  */
  rtx addr = XEXP (mem, 0);

  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* Short SP-relative form: word accesses with an offset
	     accepted by mep_imm7a4_operand (presumably a 7-bit,
	     4-aligned immediate -- confirm against the predicate).  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* Short TP-relative forms only encode registers 0..7.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* A tp-relative unspec addend.  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* A small non-negative offset aligned to the access size.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1268 
1269 int
mep_cop_address_length(rtx insn,int opn)1270 mep_cop_address_length (rtx insn, int opn)
1271 {
1272   rtx set = single_set (insn);
1273   rtx mem = XEXP (set, opn);
1274   rtx addr = XEXP (mem, 0);
1275 
1276   if (GET_CODE (mem) != MEM)
1277     return 2;
1278   if (register_operand (addr, Pmode))
1279     return 2;
1280   if (GET_CODE (addr) == POST_INC)
1281     return 2;
1282 
1283   return 4;
1284 }
1285 
1286 #define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE from OPERANDS[1] to OPERANDS[0], rewriting
   tp-relative ('b' tag -> UNS_TPREL), gp-relative ('t' tag ->
   UNS_GPREL) and far-section ('f' tag) references as needed, and
   staging control-register moves through general registers.  Return
   true if the whole move was emitted here, false to let the caller's
   mov pattern continue with (possibly modified) OPERANDS.  */

bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Mem-to-mem moves need an intermediate register; create it up
     front while we still can.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Doubleword moves are handled elsewhere (see mep_split_wide_move).  */
  if (mode == DImode || mode == DFmode)
    return false;

  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      /* If the initial value of $gp/$tp was never captured (or $tp
	 has been clobbered by reload), base-relative addressing is no
	 longer safe; treat the symbols as ordinary ones instead.  */
      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* Convert a bare 'b'/'t' symbolic source into
	 $tp/$gp + unspec(sym) [+ offset].  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  tpsym = operands[1];	/* NOTE(review): stored but unused.  */
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (const (plus (symbol_ref) (const_int))).  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already in unspec form; nothing to do here.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Likewise rewrite 'b'/'t' MEM addresses in either operand.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Control registers cannot be moved to or from memory directly;
     stage the value through a general register.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far symbols, or of non-word-sized values, need the
     destination address forced into a register first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  /* Pre-reload, 'b'/'t' sources were rewritten above; clear the tag
     so the topsym/botsym fallbacks below are not taken.  */
  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Load the value of a far/tp/gp symbol via a hi/lo insn pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Load through a far/tp/gp MEM: materialize the address with a
     hi/lo pair, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      /* During/after reload we may not create pseudos; reuse the
	 destination as the scratch.  */
      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1478 
/* Cases where the pattern can't be made to use at all.  Return true
   if the mov pattern may handle OPERANDS directly: no far-section
   operands, no pre-reload tp-/gp-section sources, and at least one
   non-control register involved.  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1545 
1546 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split the doubleword move OPERANDS[0] <- OPERANDS[1] (MODE is the
   64-bit mode) into two single-word moves, leaving the first-half
   operands in OPERANDS[2..3] and the second-half operands in
   OPERANDS[4..5] for the splitter pattern to emit.  */

void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* A single 64-bit coprocessor register: the low half
		   is its SImode view, the high half a ZERO_EXTRACT of
		   bits 32..63.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* A pair of consecutive 32-bit registers; endianness
		   selects which of the two holds the high word.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part.  */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  /* Swap the two half-moves when the first would clobber a register
     the second still reads, or when either destination half is a
     ZERO_EXTRACT (see ordering note above).  */
  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1627 
1628 /* Emit a setcc instruction in its entirity.  */
1629 
1630 static bool
mep_expand_setcc_1(enum rtx_code code,rtx dest,rtx op1,rtx op2)1631 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1632 {
1633   rtx tmp;
1634 
1635   switch (code)
1636     {
1637     case GT:
1638     case GTU:
1639       tmp = op1, op1 = op2, op2 = tmp;
1640       code = swap_condition (code);
1641       /* FALLTHRU */
1642 
1643     case LT:
1644     case LTU:
1645       op1 = force_reg (SImode, op1);
1646       emit_insn (gen_rtx_SET (VOIDmode, dest,
1647 			      gen_rtx_fmt_ee (code, SImode, op1, op2)));
1648       return true;
1649 
1650     case EQ:
1651       if (op2 != const0_rtx)
1652 	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1653       mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1654       return true;
1655 
1656     case NE:
1657       /* Branchful sequence:
1658 		mov dest, 0		16-bit
1659 		beq op1, op2, Lover	16-bit (op2 < 16), 32-bit otherwise
1660 		mov dest, 1		16-bit
1661 
1662 	 Branchless sequence:
1663 		add3 tmp, op1, -op2	32-bit (or mov + sub)
1664 		sltu3 tmp, tmp, 1	16-bit
1665 		xor3 dest, tmp, 1	32-bit
1666 	*/
1667       if (optimize_size && op2 != const0_rtx)
1668 	return false;
1669 
1670       if (op2 != const0_rtx)
1671 	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1672 
1673       op2 = gen_reg_rtx (SImode);
1674       mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1675 
1676       emit_insn (gen_rtx_SET (VOIDmode, dest,
1677 			      gen_rtx_XOR (SImode, op2, const1_rtx)));
1678       return true;
1679 
1680     case LE:
1681       if (GET_CODE (op2) != CONST_INT
1682 	  || INTVAL (op2) == 0x7ffffff)
1683 	return false;
1684       op2 = GEN_INT (INTVAL (op2) + 1);
1685       return mep_expand_setcc_1 (LT, dest, op1, op2);
1686 
1687     case LEU:
1688       if (GET_CODE (op2) != CONST_INT
1689 	  || INTVAL (op2) == -1)
1690 	return false;
1691       op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1692       return mep_expand_setcc_1 (LTU, dest, op1, op2);
1693 
1694     case GE:
1695       if (GET_CODE (op2) != CONST_INT
1696 	  || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1697 	return false;
1698       op2 = GEN_INT (INTVAL (op2) - 1);
1699       return mep_expand_setcc_1 (GT, dest, op1, op2);
1700 
1701     case GEU:
1702       if (GET_CODE (op2) != CONST_INT
1703 	  || op2 == const0_rtx)
1704 	return false;
1705       op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1706       return mep_expand_setcc_1 (GTU, dest, op1, op2);
1707 
1708     default:
1709       gcc_unreachable ();
1710     }
1711 }
1712 
1713 bool
mep_expand_setcc(rtx * operands)1714 mep_expand_setcc (rtx *operands)
1715 {
1716   rtx dest = operands[0];
1717   enum rtx_code code = GET_CODE (operands[1]);
1718   rtx op0 = operands[2];
1719   rtx op1 = operands[3];
1720 
1721   return mep_expand_setcc_1 (code, dest, op0, op1);
1722 }
1723 
/* Rewrite the comparison OPERANDS[0] (arms OPERANDS[1], OPERANDS[2])
   into a form the conditional branch patterns accept, emitting setcc
   insns for the cases with no direct branch.  Returns the comparison
   rtx to branch on.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Branch directly when op1 passes mep_imm4_operand; otherwise
	 compute the LT result and branch on it being nonzero.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      /* GE is the negation of LT: compute LT, branch on zero.  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      /* beq/bne take a register or small immediate directly.  */
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* For constant op1 convert to LT/GE by adding one (safe except
	 at INT_MAX).  Otherwise test op1 < op0 into a temporary.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* x < 1 (unsigned) is simply x == 0.  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try the immediate LEU form first, else the swapped LTU.  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      /* One of the two setcc forms must succeed for GTU.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try the immediate GEU form first, else !(op0 < op1).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1836 
1837 const char *
mep_emit_cbranch(rtx * operands,int ne)1838 mep_emit_cbranch (rtx *operands, int ne)
1839 {
1840   if (GET_CODE (operands[1]) == REG)
1841     return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1842   else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
1843     return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1844   else
1845     return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1846 }
1847 
1848 void
mep_expand_call(rtx * operands,int returns_value)1849 mep_expand_call (rtx *operands, int returns_value)
1850 {
1851   rtx addr = operands[returns_value];
1852   rtx tp = mep_tp_rtx ();
1853   rtx gp = mep_gp_rtx ();
1854 
1855   gcc_assert (GET_CODE (addr) == MEM);
1856 
1857   addr = XEXP (addr, 0);
1858 
1859   if (! mep_call_address_operand (addr, VOIDmode))
1860     addr = force_reg (SImode, addr);
1861 
1862   if (! operands[returns_value+2])
1863     operands[returns_value+2] = const0_rtx;
1864 
1865   if (returns_value)
1866     emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1867 					     operands[3], tp, gp));
1868   else
1869     emit_call_insn (gen_call_internal (addr, operands[1],
1870 				       operands[2], tp, gp));
1871 }
1872 
1873 /* Aliasing Support.  */
1874 
1875 /* If X is a machine specific address (i.e. a symbol or label being
1876    referenced as a displacement from the GOT implemented using an
1877    UNSPEC), then return the base term.  Otherwise return X.  */
1878 
1879 rtx
mep_find_base_term(rtx x)1880 mep_find_base_term (rtx x)
1881 {
1882   rtx base, term;
1883   int unspec;
1884 
1885   if (GET_CODE (x) != PLUS)
1886     return x;
1887   base = XEXP (x, 0);
1888   term = XEXP (x, 1);
1889 
1890   if (has_hard_reg_initial_val(Pmode, TP_REGNO)
1891       && base == mep_tp_rtx ())
1892     unspec = UNS_TPREL;
1893   else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
1894 	   && base == mep_gp_rtx ())
1895     unspec = UNS_GPREL;
1896   else
1897     return x;
1898 
1899   if (GET_CODE (term) != CONST)
1900     return x;
1901   term = XEXP (term, 0);
1902 
1903   if (GET_CODE (term) != UNSPEC
1904       || XINT (term, 1) != unspec)
1905     return x;
1906 
1907   return XVECEXP (term, 0, 0);
1908 }
1909 
1910 /* Reload Support.  */
1911 
1912 /* Return true if the registers in CLASS cannot represent the change from
1913    modes FROM to TO.  */
1914 
1915 bool
mep_cannot_change_mode_class(enum machine_mode from,enum machine_mode to,enum reg_class regclass)1916 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
1917 			       enum reg_class regclass)
1918 {
1919   if (from == to)
1920     return false;
1921 
1922   /* 64-bit COP regs must remain 64-bit COP regs.  */
1923   if (TARGET_64BIT_CR_REGS
1924       && (regclass == CR_REGS
1925 	  || regclass == LOADABLE_CR_REGS)
1926       && (GET_MODE_SIZE (to) < 8
1927 	  || GET_MODE_SIZE (from) < 8))
1928     return true;
1929 
1930   return false;
1931 }
1932 
1933 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1934 
1935 static bool
mep_general_reg(rtx x)1936 mep_general_reg (rtx x)
1937 {
1938   while (GET_CODE (x) == SUBREG)
1939     x = XEXP (x, 0);
1940   return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
1941 }
1942 
1943 static bool
mep_nongeneral_reg(rtx x)1944 mep_nongeneral_reg (rtx x)
1945 {
1946   while (GET_CODE (x) == SUBREG)
1947     x = XEXP (x, 0);
1948   return (GET_CODE (x) == REG
1949 	  && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
1950 }
1951 
1952 static bool
mep_general_copro_reg(rtx x)1953 mep_general_copro_reg (rtx x)
1954 {
1955   while (GET_CODE (x) == SUBREG)
1956     x = XEXP (x, 0);
1957   return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
1958 }
1959 
1960 static bool
mep_nonregister(rtx x)1961 mep_nonregister (rtx x)
1962 {
1963   while (GET_CODE (x) == SUBREG)
1964     x = XEXP (x, 0);
1965   return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
1966 }
1967 
1968 #define DEBUG_RELOAD 0
1969 
/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class RCLASS: GENERAL_REGS
   when a core register must act as intermediate, NO_REGS when the
   move is either direct or staged through memory.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2008 
2009 /* Copying X to register in RCLASS.  */
2010 
2011 enum reg_class
mep_secondary_input_reload_class(enum reg_class rclass,enum machine_mode mode ATTRIBUTE_UNUSED,rtx x)2012 mep_secondary_input_reload_class (enum reg_class rclass,
2013 				  enum machine_mode mode ATTRIBUTE_UNUSED,
2014 				  rtx x)
2015 {
2016   int rv = NO_REGS;
2017 
2018 #if DEBUG_RELOAD
2019   fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2020   debug_rtx (x);
2021 #endif
2022 
2023   if (reg_class_subset_p (rclass, CR_REGS))
2024     rv = mep_secondary_copro_reload_class (rclass, x);
2025   else if (MEP_NONGENERAL_CLASS (rclass)
2026 	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2027     rv = GENERAL_REGS;
2028 
2029 #if DEBUG_RELOAD
2030   fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2031 #endif
2032   return (enum reg_class) rv;
2033 }
2034 
2035 /* Copying register in RCLASS to X.  */
2036 
2037 enum reg_class
mep_secondary_output_reload_class(enum reg_class rclass,enum machine_mode mode ATTRIBUTE_UNUSED,rtx x)2038 mep_secondary_output_reload_class (enum reg_class rclass,
2039 				   enum machine_mode mode ATTRIBUTE_UNUSED,
2040 				   rtx x)
2041 {
2042   int rv = NO_REGS;
2043 
2044 #if DEBUG_RELOAD
2045   fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2046   debug_rtx (x);
2047 #endif
2048 
2049   if (reg_class_subset_p (rclass, CR_REGS))
2050     rv = mep_secondary_copro_reload_class (rclass, x);
2051   else if (MEP_NONGENERAL_CLASS (rclass)
2052 	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2053     rv = GENERAL_REGS;
2054 
2055 #if DEBUG_RELOAD
2056   fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2057 #endif
2058 
2059   return (enum reg_class) rv;
2060 }
2061 
2062 /* Implement SECONDARY_MEMORY_NEEDED.  */
2063 
2064 bool
mep_secondary_memory_needed(enum reg_class rclass1,enum reg_class rclass2,enum machine_mode mode ATTRIBUTE_UNUSED)2065 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2066 			     enum machine_mode mode ATTRIBUTE_UNUSED)
2067 {
2068   if (!mep_have_core_copro_moves_p)
2069     {
2070       if (reg_classes_intersect_p (rclass1, CR_REGS)
2071 	  && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2072 	return true;
2073       if (reg_classes_intersect_p (rclass2, CR_REGS)
2074 	  && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2075 	return true;
2076       if (!mep_have_copro_copro_moves_p
2077 	  && reg_classes_intersect_p (rclass1, CR_REGS)
2078 	  && reg_classes_intersect_p (rclass2, CR_REGS))
2079 	return true;
2080     }
2081   return false;
2082 }
2083 
2084 void
mep_expand_reload(rtx * operands,enum machine_mode mode)2085 mep_expand_reload (rtx *operands, enum machine_mode mode)
2086 {
2087   /* There are three cases for each direction:
2088      register, farsym
2089      control, farsym
2090      control, nearsym */
2091 
2092   int s0 = mep_section_tag (operands[0]) == 'f';
2093   int s1 = mep_section_tag (operands[1]) == 'f';
2094   int c0 = mep_nongeneral_reg (operands[0]);
2095   int c1 = mep_nongeneral_reg (operands[1]);
2096   int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2097 
2098 #if DEBUG_RELOAD
2099   fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2100   debug_rtx (operands[0]);
2101   debug_rtx (operands[1]);
2102 #endif
2103 
2104   switch (which)
2105     {
2106     case 00: /* Don't know why this gets here.  */
2107     case 02: /* general = far */
2108       emit_move_insn (operands[0], operands[1]);
2109       return;
2110 
2111     case 10: /* cr = mem */
2112     case 11: /* cr = cr */
2113     case 01: /* mem = cr */
2114     case 12: /* cr = far */
2115       emit_move_insn (operands[2], operands[1]);
2116       emit_move_insn (operands[0], operands[2]);
2117       return;
2118 
2119     case 20: /* far = general */
2120       emit_move_insn (operands[2], XEXP (operands[1], 0));
2121       emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2122       return;
2123 
2124     case 21: /* far = cr */
2125     case 22: /* far = far */
2126     default:
2127       fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2128 	       which, mode_name[mode]);
2129       debug_rtx (operands[0]);
2130       debug_rtx (operands[1]);
2131       gcc_unreachable ();
2132     }
2133 }
2134 
2135 /* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
2136    can be moved directly into registers 0 to 7, but not into the rest.
2137    If so, and if the required class includes registers 0 to 7, restrict
2138    it to those registers.  */
2139 
2140 enum reg_class
mep_preferred_reload_class(rtx x,enum reg_class rclass)2141 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2142 {
2143   switch (GET_CODE (x))
2144     {
2145     case CONST_INT:
2146       if (INTVAL (x) >= 0x10000
2147 	  && INTVAL (x) < 0x01000000
2148 	  && (INTVAL (x) & 0xffff) != 0
2149 	  && reg_class_subset_p (TPREL_REGS, rclass))
2150 	rclass = TPREL_REGS;
2151       break;
2152 
2153     case CONST:
2154     case SYMBOL_REF:
2155     case LABEL_REF:
2156       if (mep_section_tag (x) != 'f'
2157 	  && reg_class_subset_p (TPREL_REGS, rclass))
2158 	rclass = TPREL_REGS;
2159       break;
2160 
2161     default:
2162       break;
2163     }
2164   return rclass;
2165 }
2166 
2167 /* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
2168    moves, 4 for direct double-register moves, and 1000 for anything
2169    that requires a temporary register or temporary stack slot.  */
2170 
2171 int
mep_register_move_cost(enum machine_mode mode,enum reg_class from,enum reg_class to)2172 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2173 {
2174   if (mep_have_copro_copro_moves_p
2175       && reg_class_subset_p (from, CR_REGS)
2176       && reg_class_subset_p (to, CR_REGS))
2177     {
2178       if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2179 	return 4;
2180       return 2;
2181     }
2182   if (reg_class_subset_p (from, CR_REGS)
2183       && reg_class_subset_p (to, CR_REGS))
2184     {
2185       if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2186 	return 8;
2187       return 4;
2188     }
2189   if (reg_class_subset_p (from, CR_REGS)
2190       || reg_class_subset_p (to, CR_REGS))
2191     {
2192       if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2193 	return 4;
2194       return 2;
2195     }
2196   if (mep_secondary_memory_needed (from, to, mode))
2197     return 1000;
2198   if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2199     return 1000;
2200 
2201   if (GET_MODE_SIZE (mode) > 4)
2202     return 4;
2203 
2204   return 2;
2205 }
2206 
2207 
2208 /* Functions to save and restore machine-specific function data.  */
2209 
/* Allocate a zero-initialized, GC-managed machine_function record;
   installed as init_machine_status so each function gets fresh
   per-function MeP state.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2215 
2216 static rtx
mep_allocate_initial_value(rtx reg)2217 mep_allocate_initial_value (rtx reg)
2218 {
2219   int rss;
2220 
2221   if (GET_CODE (reg) != REG)
2222     return NULL_RTX;
2223 
2224   if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2225     return NULL_RTX;
2226 
2227   /* In interrupt functions, the "initial" values of $gp and $tp are
2228      provided by the prologue.  They are not necessarily the same as
2229      the values that the caller was using.  */
2230   if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2231     if (mep_interrupt_p ())
2232       return NULL_RTX;
2233 
2234   if (! cfun->machine->reg_save_slot[REGNO(reg)])
2235     {
2236       cfun->machine->reg_save_size += 4;
2237       cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2238     }
2239 
2240   rss = cfun->machine->reg_save_slot[REGNO(reg)];
2241   return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
2242 }
2243 
2244 rtx
mep_return_addr_rtx(int count)2245 mep_return_addr_rtx (int count)
2246 {
2247   if (count != 0)
2248     return const0_rtx;
2249 
2250   return get_hard_reg_initial_val (Pmode, LP_REGNO);
2251 }
2252 
/* Return an rtx holding the entry value of $tp (the tiny-data base).  */

static rtx
mep_tp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, TP_REGNO);
}
2258 
/* Return an rtx holding the entry value of $gp (the small-data base).  */

static rtx
mep_gp_rtx (void)
{
  return get_hard_reg_initial_val (Pmode, GP_REGNO);
}
2264 
2265 static bool
mep_interrupt_p(void)2266 mep_interrupt_p (void)
2267 {
2268   if (cfun->machine->interrupt_handler == 0)
2269     {
2270       int interrupt_handler
2271 	= (lookup_attribute ("interrupt",
2272 			     DECL_ATTRIBUTES (current_function_decl))
2273 	   != NULL_TREE);
2274       cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2275     }
2276   return cfun->machine->interrupt_handler == 2;
2277 }
2278 
2279 static bool
mep_disinterrupt_p(void)2280 mep_disinterrupt_p (void)
2281 {
2282   if (cfun->machine->disable_interrupts == 0)
2283     {
2284       int disable_interrupts
2285 	= (lookup_attribute ("disinterrupt",
2286 			     DECL_ATTRIBUTES (current_function_decl))
2287 	   != NULL_TREE);
2288       cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2289     }
2290   return cfun->machine->disable_interrupts == 2;
2291 }
2292 
2293 
2294 /* Frame/Epilog/Prolog Related.  */
2295 
/* Return true if INSN (an insn or a bare pattern) modifies REG.  */

static bool
mep_reg_set_p (rtx reg, rtx insn)
{
  /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
  if (INSN_P (insn))
    {
      /* An auto-increment of REG counts as a set.  */
      if (FIND_REG_INC_NOTE (insn, reg))
	return true;
      insn = PATTERN (insn);
    }

  /* A register copied onto itself leaves its value unchanged, so it
     is not treated as a set.  */
  if (GET_CODE (insn) == SET
      && GET_CODE (XEXP (insn, 0)) == REG
      && GET_CODE (XEXP (insn, 1)) == REG
      && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
    return false;

  return set_of (reg, insn) != NULL_RTX;
}
2315 
2316 
2317 #define MEP_SAVES_UNKNOWN 0
2318 #define MEP_SAVES_YES 1
2319 #define MEP_SAVES_MAYBE 2
2320 #define MEP_SAVES_NO 3
2321 
2322 static bool
mep_reg_set_in_function(int regno)2323 mep_reg_set_in_function (int regno)
2324 {
2325   rtx reg, insn;
2326 
2327   if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2328     return true;
2329 
2330   if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2331     return true;
2332 
2333   push_topmost_sequence ();
2334   insn = get_insns ();
2335   pop_topmost_sequence ();
2336 
2337   if (!insn)
2338     return false;
2339 
2340   reg = gen_rtx_REG (SImode, regno);
2341 
2342   for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2343     if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2344       return true;
2345   return false;
2346 }
2347 
2348 static bool
mep_asm_without_operands_p(void)2349 mep_asm_without_operands_p (void)
2350 {
2351   if (cfun->machine->asms_without_operands == 0)
2352     {
2353       rtx insn;
2354 
2355       push_topmost_sequence ();
2356       insn = get_insns ();
2357       pop_topmost_sequence ();
2358 
2359       cfun->machine->asms_without_operands = 1;
2360       while (insn)
2361 	{
2362 	  if (INSN_P (insn)
2363 	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2364 	    {
2365 	      cfun->machine->asms_without_operands = 2;
2366 	      break;
2367 	    }
2368 	  insn = NEXT_INSN (insn);
2369 	}
2370 
2371     }
2372   return cfun->machine->asms_without_operands == 2;
2373 }
2374 
2375 /* Interrupt functions save/restore every call-preserved register, and
2376    any call-used register it uses (or all if it calls any function,
2377    since they may get clobbered there too).  Here we check to see
2378    which call-used registers need saving.  */
2379 
/* True if R is one of the coprocessor control registers that must be
   preserved on IVC2.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			   && (r == FIRST_CCR_REGNO + 1 \
			       || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
			       || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))

/* Return true if register R must be saved because the current
   function is an interrupt handler.  The checks are ordered: the
   earlier, more conservative rules win over the later leaf-function
   exemption.  */

static bool
mep_interrupt_saved_reg (int r)
{
  if (!mep_interrupt_p ())
    return false;
  /* The temporaries used to stage control-register saves are
     clobbered by the save/restore code itself.  */
  if (r == REGSAVE_CONTROL_TEMP
      || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
    return true;
  /* A basic asm may use any register, so be conservative.  */
  if (mep_asm_without_operands_p ()
      && (!fixed_regs[r]
	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
	  || IVC2_ISAVED_REG (r)))
    return true;
  if (!crtl->is_leaf)
    /* Function calls mean we need to save $lp.  */
    if (r == LP_REGNO || IVC2_ISAVED_REG (r))
      return true;
  if (!crtl->is_leaf || cfun->machine->doloop_tags > 0)
    /* The interrupt handler might use these registers for repeat blocks,
       or it might call a function that does so.  */
    if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
      return true;
  /* A leaf handler only clobbers the call-used registers it actually
     uses.  */
  if (crtl->is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
    return false;
  /* Functions we call might clobber these.  */
  if (call_used_regs[r] && !fixed_regs[r])
    return true;
  /* Additional registers that need to be saved for IVC2.  */
  if (IVC2_ISAVED_REG (r))
    return true;

  return false;
}
2418 
/* Return true if this function must save register R in its prologue.
   The answer is cached in cfun->machine->reg_saved[]; once the frame
   layout has been locked (frame_locked), only the cached answers are
   used so the layout cannot shift underneath us.  */

static bool
mep_call_saves_register (int r)
{
  if (! cfun->machine->frame_locked)
    {
      int rv = MEP_SAVES_NO;

      /* A register that already has a save slot (see
	 mep_allocate_initial_value) is always saved.  */
      if (cfun->machine->reg_save_slot[r])
  	rv = MEP_SAVES_YES;
      /* Profiling code implicitly clobbers $lp; see mep_epilogue_uses.  */
      else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
	rv = MEP_SAVES_YES;
      else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
	rv = MEP_SAVES_YES;
      /* Call-saved registers (and $lp) that are live in the body.  */
      else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
	rv = MEP_SAVES_YES;
      else if (crtl->calls_eh_return && (r == 10 || r == 11))
	/* We need these to have stack slots so that they can be set during
	   unwinding.  */
	rv = MEP_SAVES_YES;
      else if (mep_interrupt_saved_reg (r))
	rv = MEP_SAVES_YES;
      cfun->machine->reg_saved[r] = rv;
    }
  return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
}
2444 
2445 /* Return true if epilogue uses register REGNO.  */
2446 
2447 bool
mep_epilogue_uses(int regno)2448 mep_epilogue_uses (int regno)
2449 {
2450   /* Since $lp is a call-saved register, the generic code will normally
2451      mark it used in the epilogue if it needs to be saved and restored.
2452      However, when profiling is enabled, the profiling code will implicitly
2453      clobber $11.  This case has to be handled specially both here and in
2454      mep_call_saves_register.  */
2455   if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2456     return true;
2457   /* Interrupt functions save/restore pretty much everything.  */
2458   return (reload_completed && mep_interrupt_saved_reg (regno));
2459 }
2460 
2461 static int
mep_reg_size(int regno)2462 mep_reg_size (int regno)
2463 {
2464   if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2465     return 8;
2466   return 4;
2467 }
2468 
2469 /* Worker function for TARGET_CAN_ELIMINATE.  */
2470 
2471 bool
mep_can_eliminate(const int from,const int to)2472 mep_can_eliminate (const int from, const int to)
2473 {
2474   return  (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2475            ? ! frame_pointer_needed
2476            : true);
2477 }
2478 
/* Return the offset between elimination registers FROM and TO.  As a
   side effect, this finalizes the frame layout: regsave_filler pads
   the register-save area to a multiple of 8 bytes, and frame_filler
   pads the total stack adjustment to a multiple of 8 bytes.  */

int
mep_elimination_offset (int from, int to)
{
  int reg_save_size;
  int i;
  int frame_size = get_frame_size () + crtl->outgoing_args_size;
  int total_size;

  /* Until the layout is locked, recompute the save decisions from
     scratch (mep_call_saves_register refills the cache).  */
  if (!cfun->machine->frame_locked)
    memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));

  /* We don't count arg_regs_to_save in the arg pointer offset, because
     gcc thinks the arg pointer has moved along with the saved regs.
     However, we do count it when we adjust $sp in the prologue.  */
  reg_save_size = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register (i))
      reg_save_size += mep_reg_size (i);

  /* Pad the register-save area to 8 bytes.  */
  if (reg_save_size % 8)
    cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
  else
    cfun->machine->regsave_filler = 0;

  /* This is what our total stack adjustment looks like.  */
  total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);

  /* Pad the total adjustment to 8 bytes.  */
  if (total_size % 8)
    cfun->machine->frame_filler = 8 - (total_size % 8);
  else
    cfun->machine->frame_filler = 0;


  if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler;

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return cfun->machine->frame_filler + frame_size;

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;

  gcc_unreachable ();
}
2523 
/* Mark insn X as frame-related and return it.  Shorthand used by the
   prologue-emission code below.  */

static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
2530 
2531 /* Since the prologue/epilogue code is generated after optimization,
2532    we can't rely on gcc to split constants for us.  So, this code
2533    captures all the ways to add a constant to a register in one logic
2534    chunk, including optimizing away insns we just don't need.  This
2535    makes the prolog/epilog code easier to follow.  */
/* Emit insns computing register DEST = register SRC + VALUE.  If
   MARK_FRAME is nonzero, mark the emitted insns frame-related (and
   attach a REG_FRAME_RELATED_EXPR note in the multi-insn case so the
   unwinder sees a single add).  */
static void
add_constant (int dest, int src, int value, int mark_frame)
{
  rtx insn;
  int hi, lo;

  /* Nothing to do at all.  */
  if (src == dest && value == 0)
    return;

  /* Plain register copy.  */
  if (value == 0)
    {
      insn = emit_move_insn (gen_rtx_REG (SImode, dest),
			     gen_rtx_REG (SImode, src));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* The constant fits in a single add immediate.  */
  if (value >= -32768 && value <= 32767)
    {
      insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				    gen_rtx_REG (SImode, src),
				    GEN_INT (value)));
      if (mark_frame)
	RTX_FRAME_RELATED_P(insn) = 1;
      return;
    }

  /* Big constant, need to use a temp register.  We use
     REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
     area is always small enough to directly add to).  */

  hi = trunc_int_for_mode (value & 0xffff0000, SImode);
  lo = value & 0xffff;

  /* Build the constant in the temp: high half, then OR in the low half.  */
  insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			 GEN_INT (hi));

  if (lo)
    {
      insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    GEN_INT (lo)));
    }

  insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
				gen_rtx_REG (SImode, src),
				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
  if (mark_frame)
    {
      /* Describe the whole sequence to the unwinder as one addition.  */
      RTX_FRAME_RELATED_P(insn) = 1;
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (SImode,
				 gen_rtx_REG (SImode, dest),
				 gen_rtx_PLUS (SImode,
					       gen_rtx_REG (SImode, dest),
					       GEN_INT (value))));
    }
}
2595 
/* Move SRC to DEST.  Mark the move as being potentially dead if
   MAYBE_DEAD_P.  (The REG_MAYBE_DEAD note mechanism below is disabled,
   so the flag currently has no effect.)  */

static rtx
maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
{
  rtx insn = emit_move_insn (dest, src);
#if 0
  if (maybe_dead_p)
    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
#endif
  return insn;
}
2609 
2610 /* Used for interrupt functions, which can't assume that $tp and $gp
2611    contain the correct pointers.  */
2612 
2613 static void
mep_reload_pointer(int regno,const char * symbol)2614 mep_reload_pointer (int regno, const char *symbol)
2615 {
2616   rtx reg, sym;
2617 
2618   if (!df_regs_ever_live_p(regno) && crtl->is_leaf)
2619     return;
2620 
2621   reg = gen_rtx_REG (SImode, regno);
2622   sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2623   emit_insn (gen_movsi_topsym_s (reg, sym));
2624   emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2625 }
2626 
2627 /* Assign save slots for any register not already saved.  DImode
2628    registers go at the end of the reg save area; the rest go at the
2629    beginning.  This is for alignment purposes.  Returns true if a frame
2630    is really needed.  */
/* Assign save slots for any register not already saved.  DImode
   registers go at the end of the reg save area; the rest go at the
   beginning.  This is for alignment purposes.  REG_SAVE_SIZE is the
   total size of the save area.  Returns true if a frame is really
   needed, and locks the frame layout.  */
static bool
mep_assign_save_slots (int reg_save_size)
{
  bool really_need_stack_frame = false;
  int di_ofs = 0;
  int i;

  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	int regsize = mep_reg_size (i);

	/* $tp, $gp and $lp only force a real frame when they are
	   actually modified in this function.  */
	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
	    || mep_reg_set_in_function (i))
	  really_need_stack_frame = true;

	/* A slot may already exist (see mep_allocate_initial_value).  */
	if (cfun->machine->reg_save_slot[i])
	  continue;

	if (regsize < 8)
	  {
	    /* 4-byte saves pack from the start of the area.  */
	    cfun->machine->reg_save_size += regsize;
	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
	  }
	else
	  {
	    /* 8-byte saves fill from the end, keeping them aligned.  */
	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
	    di_ofs += 8;
	  }
      }
  /* From this point on, the layout must not change.  */
  cfun->machine->frame_locked = 1;
  return really_need_stack_frame;
}
2664 
/* Expand the function prologue: adjust $sp, save the call-saved
   registers into their slots, establish the frame pointer if needed,
   and for interrupt handlers reload $gp/$tp.  */

void
mep_expand_prologue (void)
{
  int i, rss, sp_offset = 0;
  int reg_save_size;
  int frame_size;
  int really_need_stack_frame;

  /* We must not allow register renaming in interrupt functions,
     because that invalidates the correctness of the set of call-used
     registers we're going to save/restore.  */
  mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_disable_int ());

  cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  really_need_stack_frame = frame_size;

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  /* Drop $sp by the save area; fold the local frame into the same
     adjustment when the total stays below 128 (presumably to keep
     slot offsets within short-immediate range — the 128 bound).  */
  sp_offset = reg_save_size;
  if (sp_offset + frame_size < 128)
    sp_offset += frame_size ;

  add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);

  /* Save each register that needs it into its assigned slot.  */
  for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
    if (mep_call_saves_register(i))
      {
	rtx mem;
	bool maybe_dead_p;
	enum machine_mode rmode;

	rss = cfun->machine->reg_save_slot[i];

	/* $tp/$gp/$lp are only saved when modified, except in
	   interrupt handlers.  */
  	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && (!mep_reg_set_in_function (i)
		&& !mep_interrupt_p ()))
	  continue;

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* If there is a pseudo associated with this register's initial value,
	   reload might have already spilt it to the stack slot suggested by
	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
	   deleted as dead.  */
	mem = gen_rtx_MEM (rmode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_offset - rss));
	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  /* Directly storable registers: a single move.  */
	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
	else if (rmode == DImode)
	  {
	    /* A 64-bit control register is staged through two 32-bit
	       core temporaries and stored one word at a time.  */
	    rtx insn;
	    int be = TARGET_BIG_ENDIAN ? 4 : 0;

	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + be));

	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (SImode, i),
			     maybe_dead_p);
	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
			     gen_rtx_ZERO_EXTRACT (SImode,
						   gen_rtx_REG (DImode, i),
						   GEN_INT (32),
						   GEN_INT (32)),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    /* Tell the unwinder this stores the whole register.  */
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	    mem = gen_rtx_MEM (SImode,
			       plus_constant (Pmode, stack_pointer_rtx,
					      sp_offset - rss + (4-be)));
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
				    maybe_dead_p);
	  }
	else
	  {
	    /* Other control registers go through one core temporary.  */
	    rtx insn;
	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
			     gen_rtx_REG (rmode, i),
			     maybe_dead_p);
	    insn = maybe_dead_move (mem,
				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				    maybe_dead_p);
	    RTX_FRAME_RELATED_P (insn) = 1;

	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       copy_rtx (mem),
				       gen_rtx_REG (rmode, i)));
	  }
      }

  if (frame_pointer_needed)
    {
      /* We've already adjusted down by sp_offset.  Total $sp change
	 is reg_save_size + frame_size.  We want a net change here of
	 just reg_save_size.  */
      add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
    }

  /* Apply whatever part of the total adjustment remains.  */
  add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);

  if (mep_interrupt_p ())
    {
      /* Interrupts arrive with arbitrary $gp/$tp, so rematerialize
	 them from their base symbols.  */
      mep_reload_pointer(GP_REGNO, "__sdabase");
      mep_reload_pointer(TP_REGNO, "__tpbase");
    }
}
2793 
/* Emit a human-readable map of the frame layout as assembly comments
   at the start of the function.  FILE is the assembly stream,
   HWI_LOCAL the size of local variables.  Also picks the printed name
   for the frame-pointer register.  */
static void
mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
{
  int local = hwi_local;
  int frame_size = local + crtl->outgoing_args_size;
  int reg_save_size;
  int ffill;
  int i, sp, skip;
  int sp_offset;
  int slot_map[FIRST_PSEUDO_REGISTER], si, sj;

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
  sp_offset = reg_save_size + frame_size;

  ffill = cfun->machine->frame_filler;

  /* Name register 8 "$fp" only when it really is the frame pointer.  */
  if (cfun->machine->mep_frame_pointer_needed)
    reg_names[FP_REGNO] = "$fp";
  else
    reg_names[FP_REGNO] = "$8";

  if (sp_offset == 0)
    return;

  /* Without debug info, only a one-line summary is emitted.  */
  if (debug_info_level == DINFO_LEVEL_NONE)
    {
      fprintf (file, "\t# frame: %d", sp_offset);
      if (reg_save_size)
	fprintf (file, "   %d regs", reg_save_size);
      if (local)
	fprintf (file, "   %d locals", local);
      if (crtl->outgoing_args_size)
	fprintf (file, "   %d args", crtl->outgoing_args_size);
      fprintf (file, "\n");
      return;
    }

  fprintf (file, "\t#\n");
  fprintf (file, "\t# Initial Frame Information:\n");
  if (sp_offset || !frame_pointer_needed)
    fprintf (file, "\t# Entry   ---------- 0\n");

  /* Sort registers by save slots, so they're printed in the order
     they appear in memory, not the order they're saved in.  */
  for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
    slot_map[si] = si;
  for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
    for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
      if (cfun->machine->reg_save_slot[slot_map[si]]
	  > cfun->machine->reg_save_slot[slot_map[sj]])
	{
	  int t = slot_map[si];
	  slot_map[si] = slot_map[sj];
	  slot_map[sj] = t;
	}

  /* Walk the slots, printing each saved register and any alignment
     gaps between them.  SP tracks the end of the previous slot.  */
  sp = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int rsize;
      int r = slot_map[i];
      int rss = cfun->machine->reg_save_slot[r];

      if (!mep_call_saves_register (r))
	continue;

      /* Skip registers the prologue didn't actually store (same test
	 as in mep_expand_prologue).  */
      if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
	  && (!mep_reg_set_in_function (r)
	      && !mep_interrupt_p ()))
	continue;

      rsize = mep_reg_size(r);
      skip = rss - (sp+rsize);
      if (skip)
	fprintf (file, "\t#         %3d bytes for alignment\n", skip);
      fprintf (file, "\t#         %3d bytes for saved %-3s   %3d($sp)\n",
	       rsize, reg_names[r], sp_offset - rss);
      sp = rss;
    }

  skip = reg_save_size - sp;
  if (skip)
    fprintf (file, "\t#         %3d bytes for alignment\n", skip);

  if (frame_pointer_needed)
    fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
  if (local)
    fprintf (file, "\t#         %3d bytes for local vars\n", local);
  if (ffill)
    fprintf (file, "\t#         %3d bytes for alignment\n", ffill);
  if (crtl->outgoing_args_size)
    fprintf (file, "\t#         %3d bytes for outgoing args\n",
	     crtl->outgoing_args_size);
  fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
  fprintf (file, "\t#\n");
}
2891 
2892 
/* Flags communicating between the epilogue expander and its wrappers:
   mep_prevent_lp_restore is set while expanding an EH-return epilogue
   (which must not clobber the restored $lp), and mep_sibcall_epilogue
   while expanding a sibcall epilogue (which emits no return).  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;

/* Expand the function epilogue: restore the saved registers, pop the
   frame, and emit the appropriate return (or no return at all for a
   sibcall epilogue).  */

void
mep_expand_epilogue (void)
{
  int i, sp_offset = 0;
  int reg_save_size = 0;
  int frame_size;
  int lp_temp = LP_REGNO, lp_slot = -1;
  int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
  int interrupt_handler = mep_interrupt_p ();

  if (profile_arc_flag == 2)
    emit_insn (gen_mep_bb_trace_ret ());

  reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
  frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

  really_need_stack_frame |= mep_assign_save_slots (reg_save_size);

  if (frame_pointer_needed)
    {
      /* If we have a frame pointer, we won't have a reliable stack
	 pointer (alloca, you know), so rebase SP from FP */
      emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
		      gen_rtx_REG (SImode, FP_REGNO));
      sp_offset = reg_save_size;
    }
  else
    {
      /* SP is right under our local variable space.  Adjust it if
	 needed.  */
      sp_offset = reg_save_size + frame_size;
      if (sp_offset >= 128)
	{
	  /* Pop the local frame first to keep restore offsets small.  */
	  add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
	  sp_offset -= frame_size;
	}
    }

  /* This is backwards so that we restore the control and coprocessor
     registers before the temporary registers we use to restore
     them.  */
  for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
    if (mep_call_saves_register (i))
      {
	enum machine_mode rmode;
	int rss = cfun->machine->reg_save_slot[i];

	if (mep_reg_size (i) == 8)
	  rmode = DImode;
	else
	  rmode = SImode;

	/* $tp/$gp/$lp were only saved when modified, except in
	   interrupt handlers.  */
	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
	    && !(mep_reg_set_in_function (i) || interrupt_handler))
	  continue;
	/* EH return restores $lp itself.  */
	if (mep_prevent_lp_restore && i == LP_REGNO)
	  continue;
	/* $10/$11 hold EH data; only restored for EH or interrupts.  */
	if (!mep_prevent_lp_restore
	    && !interrupt_handler
	    && (i == 10 || i == 11))
	  continue;

	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
	  emit_move_insn (gen_rtx_REG (rmode, i),
			  gen_rtx_MEM (rmode,
				       plus_constant (Pmode, stack_pointer_rtx,
						      sp_offset - rss)));
	else
	  {
	    if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
	      /* Defer this one so we can jump indirect rather than
		 copying the RA to $lp and "ret".  EH epilogues
		 automatically skip this anyway.  */
	      lp_slot = sp_offset-rss;
	    else
	      {
		/* Control registers reload via the core temporary.  */
		emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
				gen_rtx_MEM (rmode,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    sp_offset-rss)));
		emit_move_insn (gen_rtx_REG (rmode, i),
				gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
	      }
	  }
      }
  if (lp_slot != -1)
    {
      /* Restore this one last so we know it will be in the temp
	 register when we return by jumping indirectly via the temp.  */
      emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
		      gen_rtx_MEM (SImode,
				   plus_constant (Pmode, stack_pointer_rtx,
						  lp_slot)));
      lp_temp = REGSAVE_CONTROL_TEMP;
    }


  add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);

  /* EH return additionally pops the caller's stack adjustment.  */
  if (crtl->calls_eh_return && mep_prevent_lp_restore)
    emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
			   gen_rtx_REG (SImode, SP_REGNO),
			   cfun->machine->eh_stack_adjust));

  /* A sibcall epilogue emits no return — the call itself follows.  */
  if (mep_sibcall_epilogue)
    return;

  if (mep_disinterrupt_p ())
    emit_insn (gen_mep_enable_int ());

  if (mep_prevent_lp_restore)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_mep_reti ());
  else
    emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
}
3017 
3018 void
mep_expand_eh_return(rtx * operands)3019 mep_expand_eh_return (rtx *operands)
3020 {
3021   if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3022     {
3023       rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3024       emit_move_insn (ra, operands[0]);
3025       operands[0] = ra;
3026     }
3027 
3028   emit_insn (gen_eh_epilogue (operands[0]));
3029 }
3030 
/* Expand the epilogue for eh_return: record where the EH runtime's
   stack adjustment lives and emit the normal epilogue with the $lp
   restore suppressed, since the eh_return path supplies its own
   return address.  */

void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  /* The EH stack adjustment arrives in register 0; the epilogue adds
     it to $sp after restoring registers (see mep_expand_epilogue).  */
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  /* Tell mep_expand_epilogue not to reload $lp from its save slot.  */
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
3039 
/* Expand the epilogue emitted before a sibling call.  The flag makes
   mep_expand_epilogue keep r10/r11 handling and skip the final return
   insn (the sibcall itself transfers control).  */

void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3047 
3048 static bool
mep_function_ok_for_sibcall(tree decl,tree exp ATTRIBUTE_UNUSED)3049 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3050 {
3051   if (decl == NULL)
3052     return false;
3053 
3054   if (mep_section_tag (DECL_RTL (decl)) == 'f')
3055     return false;
3056 
3057   /* Can't call to a sibcall from an interrupt or disinterrupt function.  */
3058   if (mep_interrupt_p () || mep_disinterrupt_p ())
3059     return false;
3060 
3061   return true;
3062 }
3063 
/* Return the register used to communicate the EH stack adjustment:
   register 10.  NOTE(review): r10 and r11 are also skipped in the
   normal epilogue restore loop (see mep_expand_epilogue) -- presumably
   because the EH machinery owns them; confirm against this port's
   EH_RETURN_* macro definitions.  */

rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
3069 
/* Return the location holding the current function's return address:
   the link register $lp.  */

rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
3075 
/* Emit the profiling prologue to FILE: save $0 and $lp on the stack,
   call __mep_mcount, then restore both.  Always placed right at the
   beginning of the function.  */

void
mep_function_profiler (FILE *file)
{
  static const char *const profiler_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n",
  };
  size_t k;

  for (k = 0; k < sizeof profiler_seq / sizeof profiler_seq[0]; k++)
    fputs (profiler_seq[k], file);
}
3091 
3092 const char *
mep_emit_bb_trace_ret(void)3093 mep_emit_bb_trace_ret (void)
3094 {
3095   fprintf (asm_out_file, "\t# end of block profiling\n");
3096   fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3097   fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3098   fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3099   fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3100   fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3101   fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3102   fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3103   fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3104   fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3105   return "";
3106 }
3107 
3108 #undef SAVE
3109 #undef RESTORE
3110 
3111 /* Operand Printing.  */
3112 
/* Print ADDRESS -- either the address expression of a MEM, or a bare
   register -- to STREAM via the generic operand printer.  */

void
mep_print_operand_address (FILE *stream, rtx address)
{
  if (GET_CODE (address) == MEM)
    address = XEXP (address, 0);
  else
    /* cf: gcc.dg/asm-4.c.  */
    gcc_assert (GET_CODE (address) == REG);

  mep_print_operand (stream, address, 0);
}
3124 
/* Template table for mep_print_operand.  Each entry maps an operand
   shape string (as produced by encode_pattern) plus an optional
   modifier letter CODE to an output FORMAT.  Within FORMAT, a digit N
   prints the sub-rtx patternr[N], '\\' escapes the next character
   literally, and a '+' may be suppressed before negative constants
   (see mep_print_operand).  The table is terminated by an all-zero
   entry.  */
static struct
{
  char code;		/* Modifier letter, or 0 for none.  */
  const char *pattern;	/* Encoded operand shape to match.  */
  const char *format;	/* Output template.  */
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3170 
3171 static int
unique_bit_in(HOST_WIDE_INT i)3172 unique_bit_in (HOST_WIDE_INT i)
3173 {
3174   switch (i & 0xff)
3175     {
3176     case 0x01: case 0xfe: return 0;
3177     case 0x02: case 0xfd: return 1;
3178     case 0x04: case 0xfb: return 2;
3179     case 0x08: case 0xf7: return 3;
3180     case 0x10: case 0x7f: return 4;
3181     case 0x20: case 0xbf: return 5;
3182     case 0x40: case 0xdf: return 6;
3183     case 0x80: case 0xef: return 7;
3184     default:
3185       gcc_unreachable ();
3186     }
3187 }
3188 
3189 static int
bit_size_for_clip(HOST_WIDE_INT i)3190 bit_size_for_clip (HOST_WIDE_INT i)
3191 {
3192   int rv;
3193 
3194   for (rv = 0; rv < 31; rv ++)
3195     if (((HOST_WIDE_INT) 1 << rv) > i)
3196       return rv + 1;
3197   gcc_unreachable ();
3198 }
3199 
/* Print an operand to an assembler instruction.  CODE is the template
   modifier character (0 if none); '<', 'L' and 'M' are handled
   specially, everything else is matched against conversions[] together
   with X's encoded pattern.  */

void
mep_print_operand (FILE *file, rtx x, int code)
{
  int i, j;
  const char *real_name;

  if (code == '<')
    {
      /* Print a mnemonic to do CR <- CR moves.  Find out which intrinsic
	 we're using, then skip over the "mep_" part of its name.  */
      const struct cgen_insn *insn;

      if (mep_get_move_insn (mep_cmov, &insn))
	fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
      else
	mep_intrinsic_unavailable (mep_cmov);
      return;
    }
  if (code == 'L')
    {
      /* Print the bit-clear/set/invert mnemonic suffix matching the
	 logical operation X.  */
      switch (GET_CODE (x))
	{
	case AND:
	  fputs ("clr", file);
	  return;
	case IOR:
	  fputs ("set", file);
	  return;
	case XOR:
	  fputs ("not", file);
	  return;
	default:
	  output_operand_lossage ("invalid %%L code");
	}
    }
  if (code == 'M')
    {
      /* Print the second operand of a CR <- CR move.  If we're using
	 a two-operand instruction (i.e., a real cmov), then just print
	 the operand normally.  If we're using a "reg, reg, immediate"
	 instruction such as caddi3, print the operand followed by a
	 zero field.  If we're using a three-register instruction,
	 print the operand twice.  */
      const struct cgen_insn *insn;

      mep_print_operand (file, x, 0);
      if (mep_get_move_insn (mep_cmov, &insn)
	  && insn_data[insn->icode].n_operands == 3)
	{
	  fputs (", ", file);
	  if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
	    mep_print_operand (file, x, 0);
	  else
	    mep_print_operand (file, const0_rtx, 0);
	}
      return;
    }

  /* Generic case: encode X's shape into the global `pattern' buffer,
     then emit the first matching template from conversions[].  Digits
     in a template print the corresponding patternr[] sub-rtx; '\\'
     escapes the next template character.  */
  encode_pattern (x);
  for (i = 0; conversions[i].pattern; i++)
    if (conversions[i].code == code
	&& strcmp(conversions[i].pattern, pattern) == 0)
      {
	for (j = 0; conversions[i].format[j]; j++)
	  if (conversions[i].format[j] == '\\')
	    {
	      fputc (conversions[i].format[j+1], file);
	      j++;
	    }
	  else if (ISDIGIT(conversions[i].format[j]))
	    {
	      rtx r = patternr[conversions[i].format[j] - '0'];
	      switch (GET_CODE (r))
		{
		case REG:
		  fprintf (file, "%s", reg_names [REGNO (r)]);
		  break;
		case CONST_INT:
		  switch (code)
		    {
		    case 'b':
		      fprintf (file, "%d", unique_bit_in (INTVAL (r)));
		      break;
		    case 'B':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
		      break;
		    case 'h':
		      fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
		      break;
		    case 'U':
		      fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
		      break;
		    case 'J':
		      fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
		      break;
		    case 'x':
		      /* Hex for values with bits set only outside the
			 low byte; decimal otherwise.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && !(INTVAL (r) & 0xff))
			fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    case 'I':
		      /* At the end of a template, annotate large
			 constants with their low 16 bits in hex.  */
		      if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
			  && conversions[i].format[j+1] == 0)
			{
			  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
			  fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
			}
		      else
			fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    default:
		      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
		      break;
		    }
		  break;
		case CONST_DOUBLE:
		  fprintf(file, "[const_double 0x%lx]",
			  (unsigned long) CONST_DOUBLE_HIGH(r));
		  break;
		case SYMBOL_REF:
		  real_name = targetm.strip_name_encoding (XSTR (r, 0));
		  assemble_name (file, real_name);
		  break;
		case LABEL_REF:
		  output_asm_label (r);
		  break;
		default:
		  fprintf (stderr, "don't know how to print this operand:");
		  debug_rtx (r);
		  gcc_unreachable ();
		}
	    }
	  else
	    {
	      /* Suppress a '+' before a negative constant; the
		 constant's own minus sign is printed instead.  */
	      if (conversions[i].format[j] == '+'
		  && (!code || code == 'I')
		  && ISDIGIT (conversions[i].format[j+1])
		  && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
		  && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
		continue;
	      fputc(conversions[i].format[j], file);
	    }
	break;
      }
  if (!conversions[i].pattern)
    {
      error ("unconvertible operand %c %qs", code?code:'-', pattern);
      debug_rtx(x);
    }

  return;
}
3356 
/* Called by final just before each insn's template is output.  Emit
   the '+' bundling prefix for non-core-slot insns.
   NOTE(review): GET_MODE (insn) == BImode appears to be this port's
   marker for a bundled insn -- confirm against the VLIW reorg code.  */

void
mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3369 
3370 /* Function args in registers.  */
3371 
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Record how many of the
   four argument registers remain unnamed and must be dumped to the
   stack for a varargs function, and report the pretend-argument
   size.  */

static void
mep_setup_incoming_varargs (cumulative_args_t cum,
			    enum machine_mode mode ATTRIBUTE_UNUSED,
			    tree type ATTRIBUTE_UNUSED, int *pretend_size,
			    int second_time ATTRIBUTE_UNUSED)
{
  /* CUM counts named arguments already assigned; the +1 accounts for
     the argument described by MODE/TYPE itself.  */
  int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);

  if (nsave > 0)
    cfun->machine->arg_regs_to_save = nsave;
  /* NOTE(review): when all registers are consumed, nsave can be <= 0
     and *pretend_size goes nonpositive -- presumably callers tolerate
     this; confirm.  */
  *pretend_size = nsave * 4;
}
3384 
3385 static int
bytesize(const_tree type,enum machine_mode mode)3386 bytesize (const_tree type, enum machine_mode mode)
3387 {
3388   if (mode == BLKmode)
3389     return int_size_in_bytes (type);
3390   return GET_MODE_SIZE (mode);
3391 }
3392 
/* Implement TARGET_EXPAND_BUILTIN_SAVEREGS.  Spill the remaining
   unnamed argument registers to a new stack block and return its
   address for use by va_start.  For IVC2 the block also holds 8-byte
   slots for the coprocessor argument registers, placed after the
   (8-byte rounded) GPR area.  */

static rtx
mep_expand_builtin_saveregs (void)
{
  int bufsize, i, ns;
  rtx regbuf;

  ns = cfun->machine->arg_regs_to_save;
  if (TARGET_IVC2)
    {
      /* GPR area rounded up to 8 bytes, plus 8 bytes per coprocessor
	 register; 64-bit aligned for the DImode stores below.  */
      bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
      regbuf = assign_stack_local (SImode, bufsize, 64);
    }
  else
    {
      bufsize = ns * 4;
      regbuf = assign_stack_local (SImode, bufsize, 32);
    }

  /* Save the last NS general argument registers.  */
  move_block_from_reg (5-ns, regbuf, ns);

  if (TARGET_IVC2)
    {
      rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
      int ofs = 8 * ((ns+1)/2);

      for (i=0; i<ns; i++)
	{
	  /* Registers 49+ mirror the GPR argument registers --
	     cf. the cum->nregs + 49 vector case in mep_function_arg.  */
	  int rn = (4-ns) + i + 49;
	  rtx ptr;

	  ptr = offset_address (tmp, GEN_INT (ofs), 2);
	  emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
	  ofs += 8;
	}
    }
  return XEXP (regbuf, 0);
}
3430 
/* True if tree type T is a vector type; used below for the IVC2
   coprocessor's vector argument/return conventions.  */
#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3432 
/* Build the machine-specific va_list record type: four pointers
   tracking the next general-register save slot, the end of that save
   area, the next coprocessor-register save slot (IVC2), and the next
   stack argument.  */

static tree
mep_build_builtin_va_list (void)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree record;


  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			  get_identifier ("__va_next_gp"), ptr_type_node);
  f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
				get_identifier ("__va_next_gp_limit"),
				ptr_type_node);
  f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
			   ptr_type_node);
  f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
			     ptr_type_node);

  DECL_FIELD_CONTEXT (f_next_gp) = record;
  DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
  DECL_FIELD_CONTEXT (f_next_cop) = record;
  DECL_FIELD_CONTEXT (f_next_stack) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TYPE_FIELDS (record) = f_next_gp;
  DECL_CHAIN (f_next_gp) = f_next_gp_limit;
  DECL_CHAIN (f_next_gp_limit) = f_next_cop;
  DECL_CHAIN (f_next_cop) = f_next_stack;

  layout_type (record);

  return record;
}
3466 
/* Implement va_start: initialize the four va_list cursors.  NEXTARG
   is the address of the first anonymous stack argument; the register
   save areas come from mep_expand_builtin_saveregs.  */

static void
mep_expand_va_start (tree valist, rtx nextarg)
{
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree t, u;
  int ns;

  ns = cfun->machine->arg_regs_to_save;

  /* Fetch the four fields in the order mep_build_builtin_va_list
     chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* va_list.next_gp = expand_builtin_saveregs (); */
  u = make_tree (sizetype, expand_builtin_saveregs ());
  u = fold_convert (ptr_type_node, u);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
  u = fold_build_pointer_plus_hwi (u, 4 * ns);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
  /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
  t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* va_list.next_stack = nextarg; */
  u = make_tree (ptr_type_node, nextarg);
  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3516 
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Emit gimple to fetch the
   next vararg of TYPE from VALIST: from the general-register save
   area while any remains (or the coprocessor save area for IVC2
   vector types), otherwise from the stack.  Values wider than one
   register slot (8 bytes for IVC2 vectors, else 4) are passed by
   reference.  */

static tree
mep_gimplify_va_arg_expr (tree valist, tree type,
			  gimple_seq *pre_p,
			  gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size, rsize;
  bool by_reference, ivc2_vec;
  tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
  tree next_gp, next_gp_limit, next_cop, next_stack;
  tree label_sover, label_selse;
  tree tmp, res_addr;

  ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);

  size = int_size_in_bytes (type);
  by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);

  /* By-reference arguments are fetched as a 4-byte pointer and
     dereferenced at the end.  */
  if (by_reference)
    {
      type = build_pointer_type (type);
      size = 4;
    }
  rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  /* Fetch the four fields in the order mep_build_builtin_va_list
     chained them.  */
  f_next_gp = TYPE_FIELDS (va_list_type_node);
  f_next_gp_limit = DECL_CHAIN (f_next_gp);
  f_next_cop = DECL_CHAIN (f_next_gp_limit);
  f_next_stack = DECL_CHAIN (f_next_cop);

  next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
		    NULL_TREE);
  next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
			  valist, f_next_gp_limit, NULL_TREE);
  next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
		     NULL_TREE);
  next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
		       valist, f_next_stack, NULL_TREE);

  /* if f_next_gp < f_next_gp_limit
       IF (VECTOR_P && IVC2)
         val = *f_next_cop;
       ELSE
         val = *f_next_gp;
       f_next_gp += 4;
       f_next_cop += 8;
     else
       label_selse:
       val = *f_next_stack;
       f_next_stack += rsize;
     label_sover:
  */

  label_sover = create_artificial_label (UNKNOWN_LOCATION);
  label_selse = create_artificial_label (UNKNOWN_LOCATION);
  res_addr = create_tmp_var (ptr_type_node, NULL);

  /* Register area exhausted?  Jump to the stack case.  */
  tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
		unshare_expr (next_gp_limit));
  tmp = build3 (COND_EXPR, void_type_node, tmp,
		build1 (GOTO_EXPR, void_type_node,
			unshare_expr (label_selse)),
		NULL_TREE);
  gimplify_and_add (tmp, pre_p);

  if (ivc2_vec)
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
      gimplify_and_add (tmp, pre_p);
    }
  else
    {
      tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
      gimplify_and_add (tmp, pre_p);
    }

  /* Both cursors advance together so they stay in step.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
  gimplify_assign (unshare_expr (next_gp), tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
  gimplify_assign (unshare_expr (next_cop), tmp, pre_p);

  tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
  gimplify_and_add (tmp, pre_p);

  tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
  gimplify_and_add (tmp, pre_p);

  tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
  gimplify_assign (unshare_expr (next_stack), tmp, pre_p);

  /* - - */

  tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
  gimplify_and_add (tmp, pre_p);

  res_addr = fold_convert (build_pointer_type (type), res_addr);

  /* For by-reference values, dereference once more to reach the
     actual object.  */
  if (by_reference)
    res_addr = build_va_arg_indirect_ref (res_addr);

  return build_va_arg_indirect_ref (res_addr);
}
3624 
3625 void
mep_init_cumulative_args(CUMULATIVE_ARGS * pcum,tree fntype,rtx libname ATTRIBUTE_UNUSED,tree fndecl ATTRIBUTE_UNUSED)3626 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3627 			  rtx libname ATTRIBUTE_UNUSED,
3628 			  tree fndecl ATTRIBUTE_UNUSED)
3629 {
3630   pcum->nregs = 0;
3631 
3632   if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3633     pcum->vliw = 1;
3634   else
3635     pcum->vliw = 0;
3636 }
3637 
3638 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack.  Arguments
3639    larger than 4 bytes are passed indirectly.  Return value in 0,
3640    unless bigger than 4 bytes, then the caller passes a pointer as the
3641    first arg.  For varargs, we copy $1..$4 to the stack.  */
3642 
/* Implement TARGET_FUNCTION_ARG (see the ABI comment above).  */

static rtx
mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* VOIDmode is a signal for the backend to pass data to the call
     expander via the second operand to the call pattern.  We use
     this to determine whether to use "jsr" or "jsrv".  */
  if (mode == VOIDmode)
    return GEN_INT (cum->vliw);

  /* If we haven't run out of argument registers, return the next:
     IVC2 vector arguments go in coprocessor registers 49+, everything
     else in $1..$4.  */
  if (cum->nregs < 4)
    {
      if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
	return gen_rtx_REG (mode, cum->nregs + 49);
      else
	return gen_rtx_REG (mode, cum->nregs + 1);
    }

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
3668 
/* Implement TARGET_PASS_BY_REFERENCE.  Anything wider than a register
   is passed by reference, except 5-8 byte IVC2 vectors that still fit
   in a coprocessor argument register.  NOTE(review): CUM is marked
   ATTRIBUTE_UNUSED but is read for the IVC2 register check below.  */

static bool
mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
		       enum machine_mode mode,
		       const_tree        type,
		       bool              named ATTRIBUTE_UNUSED)
{
  int size = bytesize (type, mode);

  /* This is non-obvious, but yes, large values passed after we've run
     out of registers are *still* passed by reference - we put the
     address of the parameter on the stack, as well as putting the
     parameter itself elsewhere on the stack.  */

  if (size <= 0 || size > 8)
    return true;
  if (size <= 4)
    return false;
  /* 5-8 bytes: only an IVC2 vector headed for a register is passed
     by value.  */
  if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
      && type != NULL_TREE && VECTOR_TYPE_P (type))
    return false;
  return true;
}
3691 
3692 static void
mep_function_arg_advance(cumulative_args_t pcum,enum machine_mode mode ATTRIBUTE_UNUSED,const_tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)3693 mep_function_arg_advance (cumulative_args_t pcum,
3694 			  enum machine_mode mode ATTRIBUTE_UNUSED,
3695 			  const_tree type ATTRIBUTE_UNUSED,
3696 			  bool named ATTRIBUTE_UNUSED)
3697 {
3698   get_cumulative_args (pcum)->nregs += 1;
3699 }
3700 
3701 bool
mep_return_in_memory(const_tree type,const_tree decl ATTRIBUTE_UNUSED)3702 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3703 {
3704   int size = bytesize (type, BLKmode);
3705   if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3706     return size > 0 && size <= 8 ? 0 : 1;
3707   return size > 0 && size <= 4 ? 0 : 1;
3708 }
3709 
/* Implement TARGET_NARROW_VOLATILE_BITFIELD: always narrow volatile
   bit-field accesses to the declared field width.  The previous body
   contained an unreachable "return false;" after the return; it has
   been removed.  */

static bool
mep_narrow_volatile_bitfield (void)
{
  return true;
}
3716 
/* Implement FUNCTION_VALUE.  All values are returned in $0.  */

rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  /* ...except IVC2 vector values, which come back in register 48
     (cf. the 49+ vector argument registers in mep_function_arg).  */
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
3726 
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   NOTE(review): unlike mep_function_value there is no IVC2 vector
   case here -- presumably libcalls never return vector values.  */

rtx
mep_libcall_value (enum machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
3734 
3735 /* Handle pipeline hazards.  */
3736 
/* Opcode classes that participate in hazard detection.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
/* Printable names for the hazard comment, indexed by op_num.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };

/* Class of the previously-emitted opcode (op_none initially).  */
static int prev_opcode = 0;
3741 
3742 /* This isn't as optimal as it could be, because we don't know what
3743    control register the STC opcode is storing in.  We only need to add
3744    the nop if it's the relevant register, but we add it for irrelevant
3745    registers also.  */
3746 
3747 void
mep_asm_output_opcode(FILE * file,const char * ptr)3748 mep_asm_output_opcode (FILE *file, const char *ptr)
3749 {
3750   int this_opcode = op_none;
3751   const char *hazard = 0;
3752 
3753   switch (*ptr)
3754     {
3755     case 'f':
3756       if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3757 	this_opcode = op_fsft;
3758       break;
3759     case 'r':
3760       if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3761 	this_opcode = op_ret;
3762       break;
3763     case 's':
3764       if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3765 	this_opcode = op_stc;
3766       break;
3767     }
3768 
3769   if (prev_opcode == op_stc && this_opcode == op_fsft)
3770     hazard = "nop";
3771   if (prev_opcode == op_stc && this_opcode == op_ret)
3772     hazard = "nop";
3773 
3774   if (hazard)
3775     fprintf(file, "%s\t# %s-%s hazard\n\t",
3776 	    hazard, opnames[prev_opcode], opnames[this_opcode]);
3777 
3778   prev_opcode = this_opcode;
3779 }
3780 
3781 /* Handle attributes.  */
3782 
/* Attribute handler for "based" and "tiny".  These are only valid on
   variables (or variable/pointer types) with static storage duration,
   and are ignored when applied to a pointed-to type.  */

static tree
mep_validate_based_tiny (tree *node, tree name, tree args,
			 int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Auto variables live on the stack; region attributes make no
	 sense there.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }

  return NULL_TREE;
}
3811 
3812 static int
mep_multiple_address_regions(tree list,bool check_section_attr)3813 mep_multiple_address_regions (tree list, bool check_section_attr)
3814 {
3815   tree a;
3816   int count_sections = 0;
3817   int section_attr_count = 0;
3818 
3819   for (a = list; a; a = TREE_CHAIN (a))
3820     {
3821       if (is_attribute_p ("based", TREE_PURPOSE (a))
3822 	  || is_attribute_p ("tiny", TREE_PURPOSE (a))
3823 	  || is_attribute_p ("near", TREE_PURPOSE (a))
3824 	  || is_attribute_p ("far", TREE_PURPOSE (a))
3825 	  || is_attribute_p ("io", TREE_PURPOSE (a)))
3826 	count_sections ++;
3827       if (check_section_attr)
3828 	section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3829     }
3830 
3831   if (check_section_attr)
3832     return section_attr_count;
3833   else
3834     return count_sections;
3835 }
3836 
/* Fetch the attribute list relevant to DECL: type attributes for a
   type node, otherwise the decl's own attributes if it has any,
   falling back to the attributes of the decl's type.  */
#define MEP_ATTRIBUTES(decl) \
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
                : DECL_ATTRIBUTES (decl) \
                  ? (DECL_ATTRIBUTES (decl)) \
		  : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3842 
/* Attribute handler for "near" and "far".  Like based/tiny these are
   address-region attributes, but they also apply to functions; a
   declaration carrying more than one region attribute has its
   attribute list dropped with a warning.  */

static tree
mep_validate_near_far (tree *node, tree name, tree args,
		       int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (0, "%qE attribute only applies to variables and functions",
	       name);
      *no_add = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      /* Auto variables live on the stack; region attributes make no
	 sense there.  */
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (0, "address region attributes not allowed with auto storage class");
	  *no_add = true;
	}
      /* Ignore storage attribute of pointed to variable: char __far * x;  */
      if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
	{
	  warning (0, "address region attributes on pointed-to types ignored");
	  *no_add = true;
	}
    }
  else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
    {
      /* A region attribute is already present; drop them all rather
	 than keep a conflicting pair.  */
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
      DECL_ATTRIBUTES (*node) = NULL_TREE;
    }
  return NULL_TREE;
}
3879 
3880 static tree
mep_validate_disinterrupt(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add)3881 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3882 			   int flags ATTRIBUTE_UNUSED, bool *no_add)
3883 {
3884   if (TREE_CODE (*node) != FUNCTION_DECL
3885       && TREE_CODE (*node) != METHOD_TYPE)
3886     {
3887       warning (0, "%qE attribute only applies to functions", name);
3888       *no_add = true;
3889     }
3890   return NULL_TREE;
3891 }
3892 
/* Attribute handler for "interrupt".  Only valid on an ordinary
   function that takes no arguments and returns void; the function is
   also made non-inlinable.  */

static tree
mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
			int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  tree function_type;

  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (0, "%qE attribute only applies to functions", name);
      *no_add = true;
      return NULL_TREE;
    }

  /* An interrupt handler must have its own prologue/epilogue, so
     inlining it into a caller would be wrong.  */
  if (DECL_DECLARED_INLINE_P (*node))
    error ("cannot inline interrupt function %qE", DECL_NAME (*node));
  DECL_UNINLINABLE (*node) = 1;

  function_type = TREE_TYPE (*node);

  if (TREE_TYPE (function_type) != void_type_node)
    error ("interrupt function must have return type of void");

  /* A prototype of exactly (void): first arg type is void and there
     are no further entries.  */
  if (prototype_p (function_type)
      && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
	  || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
    error ("interrupt function must have no arguments");

  return NULL_TREE;
}
3922 
/* Handler for the "io" and "cb" attributes, which place a variable in
   I/O or control-bus space.  The single optional argument is the fixed
   address and must be an integer constant.  */
static tree
mep_validate_io_cb (tree *node, tree name, tree args,
		    int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning (0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper the front end may have added
	 around the constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
    }

  /* I/O registers are treated as volatile by default; -mio-volatile
     behavior can be disabled with TARGET_IO_NO_VOLATILE.  */
  if (*no_add == false && !TARGET_IO_NO_VOLATILE)
    TREE_THIS_VOLATILE (*node) = 1;

  return NULL_TREE;
}
3950 
/* Handler for the "vliw" attribute, which marks a function (type) as
   using the VLIW instruction set.  Rejects the attribute on anything
   that is not function-like, printing a one-time hint for the two
   common mistakes (applying it to a pointer to function, or to an
   array of function pointers), and errors out entirely when the
   selected configuration has no VLIW support.  */
static tree
mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
		   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FUNCTION_DECL
      && TREE_CODE (*node) != METHOD_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      /* Static so each syntax hint is emitted at most once per
	 compilation, no matter how many declarations trip it.  */
      static int gave_pointer_note = 0;
      static int gave_array_note = 0;
      static const char * given_type = NULL;

      given_type = tree_code_name[TREE_CODE (*node)];
      if (TREE_CODE (*node) == POINTER_TYPE)
 	given_type = "pointers";
      if (TREE_CODE (*node) == ARRAY_TYPE)
 	given_type = "arrays";

      if (given_type)
 	warning (0, "%qE attribute only applies to functions, not %s",
 		 name, given_type);
      else
 	warning (0, "%qE attribute only applies to functions",
 		 name);
      *no_add = true;

      if (TREE_CODE (*node) == POINTER_TYPE
 	  && !gave_pointer_note)
 	{
 	  inform (input_location,
 	          "to describe a pointer to a VLIW function, use syntax like this:\n%s",
 	          "   typedef int (__vliw *vfuncptr) ();");
 	  gave_pointer_note = 1;
 	}

      if (TREE_CODE (*node) == ARRAY_TYPE
 	  && !gave_array_note)
 	{
 	  inform (input_location,
 	          "to describe an array of VLIW function pointers, use syntax like this:\n%s",
 	          "   typedef int (__vliw *vfuncptr[]) ();");
 	  gave_array_note = 1;
 	}
    }
  if (!TARGET_VLIW)
    error ("VLIW functions are not allowed without a VLIW configuration");
  return NULL_TREE;
}
4001 
/* Machine attribute table (TARGET_ATTRIBUTE_TABLE).  Nine MeP-specific
   attributes plus the mandatory all-NULL terminator; the declared size
   of 11 leaves one zero-filled spare slot.  */
static const struct attribute_spec mep_attribute_table[11] =
{
  /* name         min max decl   type   func   handler
     affects_type_identity */
  { "based",        0, 0, false, false, false, mep_validate_based_tiny, false },
  { "tiny",         0, 0, false, false, false, mep_validate_based_tiny, false },
  { "near",         0, 0, false, false, false, mep_validate_near_far, false },
  { "far",          0, 0, false, false, false, mep_validate_near_far, false },
  { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
    false },
  { "interrupt",    0, 0, false, false, false, mep_validate_interrupt, false },
  { "io",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "cb",           0, 1, false, false, false, mep_validate_io_cb, false },
  { "vliw",         0, 0, false, true,  false, mep_validate_vliw, false },
  { NULL,           0, 0, false, false, false, NULL, false }
};
4018 
4019 static bool
mep_function_attribute_inlinable_p(const_tree callee)4020 mep_function_attribute_inlinable_p (const_tree callee)
4021 {
4022   tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4023   if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4024   return (lookup_attribute ("disinterrupt", attrs) == 0
4025 	  && lookup_attribute ("interrupt", attrs) == 0);
4026 }
4027 
4028 static bool
mep_can_inline_p(tree caller,tree callee)4029 mep_can_inline_p (tree caller, tree callee)
4030 {
4031   if (TREE_CODE (callee) == ADDR_EXPR)
4032     callee = TREE_OPERAND (callee, 0);
4033 
4034   if (!mep_vliw_function_p (caller)
4035       && mep_vliw_function_p (callee))
4036     {
4037       return false;
4038     }
4039   return true;
4040 }
4041 
/* Flag bits recorded per function name by the "#pragma call" and
   "#pragma disinterrupt" handlers below.  */
#define FUNC_CALL		1
#define FUNC_DISINTERRUPT	2


/* One entry per function name mentioned in a pragma.  FLAG holds the
   FUNC_* bits recorded; USED holds the bits that have actually been
   looked up, so unmatched pragmas can be diagnosed at end of file.  */
struct GTY(()) pragma_entry {
  int used;
  int flag;
  const char *funcname;
};
typedef struct pragma_entry pragma_entry;

/* GC-rooted hash table mapping function names to their pragma_entry.  */
static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4055 
4056 static int
pragma_entry_eq(const void * p1,const void * p2)4057 pragma_entry_eq (const void *p1, const void *p2)
4058 {
4059   const pragma_entry *old = (const pragma_entry *) p1;
4060   const char *new_name = (const char *) p2;
4061 
4062   return strcmp (old->funcname, new_name) == 0;
4063 }
4064 
/* Hash callback for pragma_htab: hash the stored function name with
   the same string hash used for lookups.  */
static hashval_t
pragma_entry_hash (const void *p)
{
  const pragma_entry *old = (const pragma_entry *) p;
  return htab_hash_string (old->funcname);
}
4071 
/* Record that FUNCNAME was named in a pragma of kind FLAG (FUNC_CALL
   or FUNC_DISINTERRUPT), creating the hash table and/or the entry on
   first use.  Multiple flags for one name accumulate in one entry.  */
static void
mep_note_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    pragma_htab = htab_create_ggc (31, pragma_entry_hash,
				    pragma_entry_eq, NULL);

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), INSERT);

  if (!*slot)
    {
      *slot = ggc_alloc_pragma_entry ();
      (*slot)->flag = 0;
      (*slot)->used = 0;
      /* Copy the name into GC-managed storage; the caller's string
	 need not outlive the table.  */
      (*slot)->funcname = ggc_strdup (funcname);
    }
  (*slot)->flag |= flag;
}
4094 
/* Return true if FUNCNAME was named in a pragma of kind FLAG, and mark
   that flag as used so mep_file_cleanups can warn about pragmas that
   never matched any definition.  */
static bool
mep_lookup_pragma_flag (const char *funcname, int flag)
{
  pragma_entry **slot;

  if (!pragma_htab)
    return false;

  /* Strip a "@x." section-encoding prefix added by
     mep_encode_section_info; pragmas refer to the bare name.  */
  if (funcname[0] == '@' && funcname[2] == '.')
    funcname += 3;

  slot = (pragma_entry **)
    htab_find_slot_with_hash (pragma_htab, funcname,
			      htab_hash_string (funcname), NO_INSERT);
  if (slot && *slot && ((*slot)->flag & flag))
    {
      (*slot)->used |= flag;
      return true;
    }
  return false;
}
4116 
/* Return true if "#pragma call FUNCNAME" was seen.  */
bool
mep_lookup_pragma_call (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_CALL);
}

/* Record a "#pragma call" for FUNCNAME.  */
void
mep_note_pragma_call (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_CALL);
}

/* Return true if "#pragma disinterrupt FUNCNAME" was seen.  */
bool
mep_lookup_pragma_disinterrupt (const char *funcname)
{
  return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
}

/* Record a "#pragma disinterrupt" for FUNCNAME.  */
void
mep_note_pragma_disinterrupt (const char *funcname)
{
  mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
}
4140 
/* htab_traverse callback: warn about a "#pragma disinterrupt" entry
   that was recorded but never looked up.  Returns 1 so that the
   traversal continues over the whole table.  */
static int
note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const pragma_entry *d = (const pragma_entry *)(*slot);

  if ((d->flag & FUNC_DISINTERRUPT)
      && !(d->used & FUNC_DISINTERRUPT))
    warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
  return 1;
}
4151 
/* Called at the end of the compilation unit: diagnose every
   "#pragma disinterrupt" that never matched a function.  */
void
mep_file_cleanups (void)
{
  if (pragma_htab)
    htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
}
4158 
/* These three functions provide a bridge between the pragmas that
4160    affect register classes, and the functions that maintain them.  We
4161    can't call those functions directly as pragma handling is part of
4162    the front end and doesn't have direct access to them.  */
4163 
/* Snapshot the current register tables before a pragma alters them.  */
void
mep_save_register_info (void)
{
  save_register_info ();
}

/* Re-derive register class information after a pragma has changed the
   register tables.  */
void
mep_reinit_regs (void)
{
  reinit_regs ();
}

/* Rebuild the register tables from scratch on behalf of a pragma.  */
void
mep_init_regs (void)
{
  init_regs ();
}
4181 
4182 
4183 
/* Scan attribute list LIST (attached to DECL) for an address-region
   attribute and return its one-character section encoding:
     'b' based, 't' tiny, 'n' near, 'f' far, 'c' cb,
     'i' io with an in-range constant address, 'I' io otherwise,
     0 when no region attribute applies.
   Duplicate region attributes are diagnosed and all but the first
   dropped from the list.  */
static int
mep_attrlist_to_encoding (tree list, tree decl)
{
  if (mep_multiple_address_regions (list, false) > 1)
    {
      warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
	       TREE_PURPOSE (TREE_CHAIN (list)),
	       DECL_NAME (decl),
	       DECL_SOURCE_LINE (decl));
      /* Truncate the list so only the first region attribute stays.  */
      TREE_CHAIN (list) = NULL_TREE;
    }

  while (list)
    {
      if (is_attribute_p ("based", TREE_PURPOSE (list)))
	return 'b';
      if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
	return 't';
      if (is_attribute_p ("near", TREE_PURPOSE (list)))
	return 'n';
      if (is_attribute_p ("far", TREE_PURPOSE (list)))
	return 'f';
      if (is_attribute_p ("io", TREE_PURPOSE (list)))
	{
	  if (TREE_VALUE (list)
	      && TREE_VALUE (TREE_VALUE (list))
	      && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
	      /* Only addresses within [0, 0x1000000] get the short
		 'i' encoding; others fall back to 'I'.  */
	      if (location >= 0
		  && location <= 0x1000000)
		return 'i';
	    }
	  return 'I';
	}
      if (is_attribute_p ("cb", TREE_PURPOSE (list)))
	return 'c';
      list = TREE_CHAIN (list);
    }
  /* With -mtf, functions that have no explicit section default to the
     far text section.  */
  if (TARGET_TF
      && TREE_CODE (decl) == FUNCTION_DECL
      && DECL_SECTION_NAME (decl) == 0)
    return 'f';
  return 0;
}
4229 
4230 static int
mep_comp_type_attributes(const_tree t1,const_tree t2)4231 mep_comp_type_attributes (const_tree t1, const_tree t2)
4232 {
4233   int vliw1, vliw2;
4234 
4235   vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4236   vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4237 
4238   if (vliw1 != vliw2)
4239     return 0;
4240 
4241   return 1;
4242 }
4243 
/* TARGET_INSERT_ATTRIBUTES: add implicit attributes to DECL.
   Functions named in a "#pragma disinterrupt" receive a disinterrupt
   attribute; global variables without an explicit address-region
   attribute get a default region ("based", "tiny" or "far") chosen
   from their size, the -mbased=/-mtiny= cutoffs, and related target
   switches.  */
static void
mep_insert_attributes (tree decl, tree *attributes)
{
  int size;
  const char *secname = 0;
  tree attrib, attrlist;
  char encoding;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));

      if (mep_lookup_pragma_disinterrupt (funcname))
	{
	  attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
	  *attributes = chainon (*attributes, attrib);
	}
    }

  /* Only variables with linkage or static storage get a default
     region; everything else is left alone.  */
  if (TREE_CODE (decl) != VAR_DECL
      || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    return;

  if (TREE_READONLY (decl) && TARGET_DC)
    /* -mdc means that const variables default to the near section,
       regardless of the size cutoff.  */
    return;

  /* User specified an attribute, so override the default.
     Ignore storage attribute of pointed to variable. char __far * x;  */
  if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
    {
      /* NOTE(review): DECL is known to be a VAR_DECL here, so the
	 TYPE_P branch looks unreachable — confirm before relying on
	 it.  */
      if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
	TYPE_ATTRIBUTES (decl) = NULL_TREE;
      else if (DECL_ATTRIBUTES (decl) && *attributes)
	DECL_ATTRIBUTES (decl) = NULL_TREE;
    }

  /* Look for an explicit region encoding, first on the decl's
     attributes (incoming or already attached), then on its type's.  */
  attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
  encoding = mep_attrlist_to_encoding (attrlist, decl);
  if (!encoding && TYPE_P (TREE_TYPE (decl)))
    {
      attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
      encoding = mep_attrlist_to_encoding (attrlist, decl);
    }
  if (encoding)
    {
      /* This means that the declaration has a specific section
	 attribute, so we should not apply the default rules.  */

      if (encoding == 'i' || encoding == 'I')
	{
	  tree attr = lookup_attribute ("io", attrlist);
	  if (attr
	      && TREE_VALUE (attr)
	      && TREE_VALUE (TREE_VALUE(attr)))
	    {
	      int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
	      static tree previous_value = 0;
	      static int previous_location = 0;
	      static tree previous_name = 0;

	      /* We take advantage of the fact that gcc will reuse the
		 same tree pointer when applying an attribute to a
		 list of decls, but produce a new tree for attributes
		 on separate source lines, even when they're textually
		 identical.  This is the behavior we want.  */
	      if (TREE_VALUE (attr) == previous_value
		  && location == previous_location)
		{
		  warning(0, "__io address 0x%x is the same for %qE and %qE",
			  location, previous_name, DECL_NAME (decl));
		}
	      previous_name = DECL_NAME (decl);
	      previous_location = location;
	      previous_value = TREE_VALUE (attr);
	    }
	}
      return;
    }


  /* Declarations of arrays can change size.  Don't trust them.  */
  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    size = 0;
  else
    size = int_size_in_bytes (TREE_TYPE (decl));

  /* -mrand-tpgp spreads small globals pseudo-randomly over the three
     regions, keyed on a checksum of the name, for testing.  */
  if (TARGET_RAND_TPGP && size <= 4 && size > 0)
    {
      if (TREE_PUBLIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_STATIC (decl))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
	  int key = 0;

	  while (*name)
	    key += *name++;

	  switch (key & 3)
	    {
	    case 0:
	      secname = "based";
	      break;
	    case 1:
	      secname = "tiny";
	      break;
	    case 2:
	      secname = "far";
	      break;
	    default:
	      ;
	    }
	}
    }
  else
    {
      /* Normal path: choose the region by size cutoff.  */
      if (size <= mep_based_cutoff && size > 0)
	secname = "based";
      else if (size <= mep_tiny_cutoff && size > 0)
	secname = "tiny";
      else if (TARGET_L)
	secname = "far";
    }

  /* -mconst-section= can override the region for read-only data.  */
  if (mep_const_section && TREE_READONLY (decl))
    {
      if (strcmp (mep_const_section, "tiny") == 0)
	secname = "tiny";
      else if (strcmp (mep_const_section, "near") == 0)
	return;
      else if (strcmp (mep_const_section, "far") == 0)
	secname = "far";
    }

  if (!secname)
    return;

  if (!mep_multiple_address_regions (*attributes, true)
      && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
    {
      attrib = build_tree_list (get_identifier (secname), NULL_TREE);

      /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
	 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
	 and mep_validate_based_tiny.  */
      DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
    }
}
4394 
/* TARGET_ENCODE_SECTION_INFO: on the first call for DECL, prefix its
   assembler name with "@x." where x is the region encoding computed by
   mep_attrlist_to_encoding, and warn when a variable exceeds the
   capacity of its region.  */
static void
mep_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char *oldname;
  const char *secname;
  char encoding;
  char *newname;
  tree idp;
  int maxsize;
  tree type;
  tree mep_attributes;

  if (! first)
    return;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Extract the current assembler name from the decl's RTL, which is
     either a bare SYMBOL_REF or a MEM wrapping one.  */
  rtlname = XEXP (rtl, 0);
  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  mep_attributes = MEP_ATTRIBUTES (decl);

  encoding = mep_attrlist_to_encoding (mep_attributes, decl);

  if (encoding)
    {
      /* "@x." is 3 chars plus the NUL terminator: strlen + 4.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);

      /* Size limit of each region that has one.  */
      switch (encoding)
	{
	case 'b':
	  maxsize = 128;
	  secname = "based";
	  break;
	case 't':
	  maxsize = 65536;
	  secname = "tiny";
	  break;
	case 'n':
	  maxsize = 0x1000000;
	  secname = "near";
	  break;
	default:
	  maxsize = 0;
	  secname = 0;
	  break;
	}
      if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
	{
	  warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
		   oldname,
		   (long) int_size_in_bytes (TREE_TYPE (decl)),
		   secname,
		   maxsize);
	}
    }
}
4470 
/* TARGET_STRIP_NAME_ENCODING: skip any number of leading '*' markers
   and "@x." region prefixes (added by mep_encode_section_info) and
   return a pointer to the bare symbol name inside SYM.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  for (;;)
    {
      if (sym[0] == '*')
	{
	  sym += 1;
	  continue;
	}
      if (sym[0] == '@' && sym[2] == '.')
	{
	  sym += 3;
	  continue;
	}
      return sym;
    }
}
4484 
/* TARGET_ASM_SELECT_SECTION: pick a section for DECL based on the
   "@x." name encoding attached by mep_encode_section_info and on
   whether the object is effectively read-only.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Decide whether the object can live in a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the one-character region encoding, if any.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW code goes in its own text sections so the assembler
	     can be switched into VLIW mode for them.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables are fixed-address registers; an
	       initializer makes no sense for them.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4575 
/* TARGET_ASM_UNIQUE_SECTION: build a per-decl section name.  SEC
   indexes a row of PREFIXES below; the column picks the plain prefix
   or the .gnu.linkonce form for DECL_ONE_ONLY decls.  The decl's "@x."
   name encoding redirects the choice to the MeP-specific sections.  */
static void
mep_unique_section (tree decl, int reloc)
{
  static const char *prefixes[][2] =
  {
    { ".text.",   ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".based.",   ".gnu.linkonce.based." },
    { ".sdata.",   ".gnu.linkonce.s." },
    { ".far.",     ".gnu.linkonce.far." },
    { ".ftext.",   ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.",   ".gnu.linkonce.v." },
    { ".vftext.",   ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly encoded) name from the RTL if present.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Let a "@x." region encoding refine the base section choice.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      /* Drop the encoding from the emitted section name.  */
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len    = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4647 
/* Given a decl, a section name, and whether the decl initializer
   has relocs, choose attributes for the section.  */

/* Target-specific section flag marking a section as containing VLIW
   code, so mep_asm_named_section can emit the 'v' flag and switch the
   assembler mode.  */
#define SECTION_MEP_VLIW	SECTION_MACH_DEP

/* TARGET_SECTION_TYPE_FLAGS: default flags, plus SECTION_MEP_VLIW for
   sections holding VLIW functions.  */
static unsigned int
mep_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (decl && TREE_CODE (decl) == FUNCTION_DECL
      && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    flags |= SECTION_MEP_VLIW;

  return flags;
}
4664 
4665 /* Switch to an arbitrary section NAME with attributes as specified
4666    by FLAGS.  ALIGN specifies any known alignment requirements for
4667    the section; 0 if the default should be used.
4668 
4669    Differs from the standard ELF version only in support of VLIW mode.  */
4670 
/* TARGET_ASM_NAMED_SECTION: emit a .section directive for NAME with
   ELF-style flag characters derived from FLAGS, then switch the
   assembler between .vliw and .core mode for code sections.  */
static void
mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
{
  /* At most five flag characters ("awxsv") plus the NUL fit here.  */
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MEP_VLIW)
    *f++ = 'v';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
	   name, flagchars, type);

  /* Tell the assembler which instruction set the section's code
     uses.  */
  if (flags & SECTION_CODE)
    fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
	   asm_out_file);
}
4701 
/* Emit a common (or local-common) definition for NAME/DECL.  io/cb
   variables with a fixed address become bare symbol assignments;
   based/tiny/far variables are emitted as zero-filled objects in their
   own bss-like sections; everything else falls through to .comm.  */
void
mep_output_aligned_common (FILE *stream, tree decl, const char *name,
			   int size, int align, int global)
{
  /* We intentionally don't use mep_section_tag() here.  */
  if (name[0] == '@'
      && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
      && name[2] == '.')
    {
      int location = -1;
      tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
				    DECL_ATTRIBUTES (decl));
      if (attr
	  && TREE_VALUE (attr)
	  && TREE_VALUE (TREE_VALUE(attr)))
	location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
      /* Without an explicit address there is nothing to emit.  */
      if (location == -1)
	return;
      if (global)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      /* Define the symbol as an absolute address.  */
      assemble_name (stream, name);
      fprintf (stream, " = %d\n", location);
      return;
    }
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 'b':
	  switch_to_section (based_section);
	  sec = ".based";
	  break;
	case 't':
	  switch_to_section (tinybss_section);
	  sec = ".sbss";
	  break;
	case 'f':
	  switch_to_section (farbss_section);
	  sec = ".farbss";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment to a power-of-two byte count
	     for .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.globl\t%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t%s,@object\n", name2);
	  fprintf (stream, "\t.size\t%s,%d\n", name2, size);
	  fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* Default: use .comm, preceded by .local for non-global symbols.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4779 
4780 /* Trampolines.  */
4781 
/* TARGET_TRAMPOLINE_INIT: rather than emitting trampoline code
   inline, call the runtime helper __mep_trampoline_helper, which
   writes the code sequence for FNDECL/STATIC_CHAIN into M_TRAMP.  */
static void
mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx addr = XEXP (m_tramp, 0);
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
		     LCT_NORMAL, VOIDmode, 3,
		     addr, Pmode,
		     fnaddr, Pmode,
		     static_chain, Pmode);
}
4794 
4795 /* Experimental Reorg.  */
4796 
/* Return true if REG is mentioned anywhere inside the rtx IN, or, when
   REG is NULL, whether IN contains any MEM.  With MODES_TOO nonzero a
   register only matches when its machine mode matches as well.  Used
   by the regmove reorg pass below.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  /* A label reference never mentions a register, but counts as a
     "mention" for the REG == NULL (memory) query.  */
  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaf codes that can never contain the target.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Generic recursive walk over the operands of IN.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4862 
4863 #define EXPERIMENTAL_REGMOVE_REORG 1
4864 
4865 #if EXPERIMENTAL_REGMOVE_REORG
4866 
/* Return 1 when hard registers R1 and R2 may substitute for each other
   in the regmove pass: both core registers or both coprocessor
   registers.  Return 0 otherwise.  */
static int
mep_compatible_reg_class (int r1, int r2)
{
  if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
    return 1;
  return (CR_REGNO_P (r1) && CR_REGNO_P (r2)) ? 1 : 0;
}
4876 
4877 static void
mep_reorg_regmove(rtx insns)4878 mep_reorg_regmove (rtx insns)
4879 {
4880   rtx insn, next, pat, follow, *where;
4881   int count = 0, done = 0, replace, before = 0;
4882 
4883   if (dump_file)
4884     for (insn = insns; insn; insn = NEXT_INSN (insn))
4885       if (GET_CODE (insn) == INSN)
4886 	before++;
4887 
4888   /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4889      set that uses the r2 and r2 dies there.  We replace r2 with r1
4890      and see if it's still a valid insn.  If so, delete the first set.
4891      Copied from reorg.c.  */
4892 
4893   while (!done)
4894     {
4895       done = 1;
4896       for (insn = insns; insn; insn = next)
4897 	{
4898 	  next = next_nonnote_nondebug_insn (insn);
4899 	  if (GET_CODE (insn) != INSN)
4900 	    continue;
4901 	  pat = PATTERN (insn);
4902 
4903 	  replace = 0;
4904 
4905 	  if (GET_CODE (pat) == SET
4906 	      && GET_CODE (SET_SRC (pat)) == REG
4907 	      && GET_CODE (SET_DEST (pat)) == REG
4908 	      && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4909 	      && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4910 	    {
4911 	      follow = next_nonnote_nondebug_insn (insn);
4912 	      if (dump_file)
4913 		fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4914 
4915 	      while (follow && GET_CODE (follow) == INSN
4916 		     && GET_CODE (PATTERN (follow)) == SET
4917 		     && !dead_or_set_p (follow, SET_SRC (pat))
4918 		     && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4919 		     && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4920 		{
4921 		  if (dump_file)
4922 		    fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4923 		  follow = next_nonnote_insn (follow);
4924 		}
4925 
4926 	      if (dump_file)
4927 		fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4928 	      if (follow && GET_CODE (follow) == INSN
4929 		  && GET_CODE (PATTERN (follow)) == SET
4930 		  && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4931 		{
4932 		  if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4933 		    {
4934 		      if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4935 			{
4936 			  replace = 1;
4937 			  where = & SET_SRC (PATTERN (follow));
4938 			}
4939 		    }
4940 		  else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4941 		    {
4942 		      if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4943 			{
4944 			  replace = 1;
4945 			  where = & PATTERN (follow);
4946 			}
4947 		    }
4948 		}
4949 	    }
4950 
4951 	  /* If so, follow is the corresponding insn */
4952 	  if (replace)
4953 	    {
4954 	      if (dump_file)
4955 		{
4956 		  rtx x;
4957 
4958 		  fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4959 		  for (x = insn; x ;x = NEXT_INSN (x))
4960 		    {
4961 		      print_rtl_single (dump_file, x);
4962 		      if (x == follow)
4963 			break;
4964 		      fprintf (dump_file, "\n");
4965 		    }
4966 		}
4967 
4968 	      if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
4969 					       follow, where))
4970 		{
4971 		  count ++;
4972 		  delete_insn (insn);
4973 		  if (dump_file)
4974 		    {
4975 		      fprintf (dump_file, "\n----- Success!  new insn:\n\n");
4976 		      print_rtl_single (dump_file, follow);
4977 		    }
4978 		  done = 0;
4979 		}
4980 	    }
4981 	}
4982     }
4983 
4984   if (dump_file)
4985     {
4986       fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
4987       fprintf (dump_file, "=====\n");
4988     }
4989 }
4990 #endif
4991 
4992 
4993 /* Figure out where to put LABEL, which is the label for a repeat loop.
4994    If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
4995    the loop ends just before LAST_INSN.  If SHARED, insns other than the
4996    "repeat" might use LABEL to jump to the loop's continuation point.
4997 
4998    Return the last instruction in the adjusted loop.  */
4999 
static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* The label is private to this repeat, so we may move it earlier:
       scan backwards and try to pull up to two eligible trailing insns
       inside the repeat epilogue (the hardware repeat block must cover
       the final two opcodes; see the nop padding below).  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    /* For a SEQUENCE (bundle), examine its second element.  */
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  If we could not pull two insns inside the
     epilogue above, pad the loop body out to two with nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5090 
5091 
/* Emit the internal form of a doloop_begin (IS_END == 0) or
   doloop_end (IS_END != 0) from expander OPERANDS.  Both halves of a
   loop are stamped with the same integer tag so that mep_reorg_repeat
   can pair them up later.  */

void
mep_emit_doloop (rtx *operands, int is_end)
{
  rtx tag;

  /* Allocate a new tag for the first doloop insn of the function, or
     whenever the previous doloop insn was the same kind as this one
     (two begins or two ends in a row must belong to different loops).  */
  if (cfun->machine->doloop_tags == 0
      || cfun->machine->doloop_tag_from_end == is_end)
    {
      cfun->machine->doloop_tags++;
      cfun->machine->doloop_tag_from_end = is_end;
    }

  /* Tags are numbered from zero.  */
  tag = GEN_INT (cfun->machine->doloop_tags - 1);
  if (is_end)
    emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
  else
    emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
}
5110 
5111 
5112 /* Code for converting doloop_begins and doloop_ends into valid
5113    MeP instructions.  A doloop_begin is just a placeholder:
5114 
5115 	$count = unspec ($count)
5116 
5117    where $count is initially the number of iterations - 1.
5118    doloop_end has the form:
5119 
5120 	if ($count-- == 0) goto label
5121 
5122    The counter variable is private to the doloop insns, nothing else
5123    relies on its value.
5124 
5125    There are three cases, in decreasing order of preference:
5126 
5127       1. A loop has exactly one doloop_begin and one doloop_end.
5128 	 The doloop_end branches to the first instruction after
5129 	 the doloop_begin.
5130 
5131 	 In this case we can replace the doloop_begin with a repeat
5132 	 instruction and remove the doloop_end.  I.e.:
5133 
5134 		$count1 = unspec ($count1)
5135 	    label:
5136 		...
5137 		insn1
5138 		insn2
5139 		if ($count2-- == 0) goto label
5140 
5141 	  becomes:
5142 
5143 		repeat $count1,repeat_label
5144 	    label:
5145 		...
5146 	    repeat_label:
5147 		insn1
5148 		insn2
5149 		# end repeat
5150 
5151       2. As for (1), except there are several doloop_ends.  One of them
5152 	 (call it X) falls through to a label L.  All the others fall
5153 	 through to branches to L.
5154 
5155 	 In this case, we remove X and replace the other doloop_ends
5156 	 with branches to the repeat label.  For example:
5157 
5158 		$count1 = unspec ($count1)
5159 	    start:
5160 		...
5161 		if ($count2-- == 0) goto label
5162 	    end:
5163 		...
5164 		if ($count3-- == 0) goto label
5165 		goto end
5166 
5167 	 becomes:
5168 
5169 		repeat $count1,repeat_label
5170 	    start:
5171 		...
5172 	    repeat_label:
5173 		nop
5174 		nop
5175 		# end repeat
5176 	    end:
5177 		...
5178 		goto repeat_label
5179 
5180       3. The fallback case.  Replace doloop_begins with:
5181 
5182 		$count = $count + 1
5183 
5184 	 Replace doloop_ends with the equivalent of:
5185 
5186 		$count = $count - 1
5187 		if ($count == 0) goto label
5188 
5189 	 Note that this might need a scratch register if $count
5190 	 is stored in memory.  */
5191 
/* A structure describing one doloop_begin.  Allocated on the stack by
   mep_reorg_repeat and linked per loop tag.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};
5203 
/* A structure describing a doloop_end.  Allocated on the stack by
   mep_reorg_repeat and linked per loop tag.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};
5228 
5229 
/* One do-while loop: all the doloop parts that share a single tag
   (see mep_emit_doloop).  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5239 
5240 
5241 /* Return true if LOOP can be converted into repeat/repeat_end form
5242    (that is, if it matches cases (1) or (2) above).  */
5243 
5244 static bool
mep_repeat_loop_p(struct mep_doloop * loop)5245 mep_repeat_loop_p (struct mep_doloop *loop)
5246 {
5247   struct mep_doloop_end *end;
5248   rtx fallthrough;
5249 
5250   /* There must be exactly one doloop_begin and at least one doloop_end.  */
5251   if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5252     return false;
5253 
5254   /* The first doloop_end (X) must branch back to the insn after
5255      the doloop_begin.  */
5256   if (prev_real_insn (loop->end->label) != loop->begin->insn)
5257     return false;
5258 
5259   /* All the other doloop_ends must branch to the same place as X.
5260      When the branch isn't taken, they must jump to the instruction
5261      after X.  */
5262   fallthrough = loop->end->fallthrough;
5263   for (end = loop->end->next; end != 0; end = end->next)
5264     if (end->label != loop->end->label
5265 	|| !simplejump_p (end->fallthrough)
5266 	|| next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5267       return false;
5268 
5269   return true;
5270 }
5271 
5272 
5273 /* The main repeat reorg function.  See comment above for details.  */
5274 
static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures, one per loop tag.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  /* Populate recog_data.operand[] with this insn's operands;
	     operand 2 is the loop tag (see mep_emit_doloop).  */
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5420 
5421 
5422 static bool
mep_invertable_branch_p(rtx insn)5423 mep_invertable_branch_p (rtx insn)
5424 {
5425   rtx cond, set;
5426   enum rtx_code old_code;
5427   int i;
5428 
5429   set = PATTERN (insn);
5430   if (GET_CODE (set) != SET)
5431     return false;
5432   if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5433     return false;
5434   cond = XEXP (XEXP (set, 1), 0);
5435   old_code = GET_CODE (cond);
5436   switch (old_code)
5437     {
5438     case EQ:
5439       PUT_CODE (cond, NE);
5440       break;
5441     case NE:
5442       PUT_CODE (cond, EQ);
5443       break;
5444     case LT:
5445       PUT_CODE (cond, GE);
5446       break;
5447     case GE:
5448       PUT_CODE (cond, LT);
5449       break;
5450     default:
5451       return false;
5452     }
5453   INSN_CODE (insn) = -1;
5454   i = recog_memoized (insn);
5455   PUT_CODE (cond, old_code);
5456   INSN_CODE (insn) = -1;
5457   return i >= 0;
5458 }
5459 
/* Invert the condition of branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   after AFTER.  The original target is deleted if INSN was its only
   user.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      /* mep_invertable_branch_p only passes the four codes above.  */
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the label
     (arm 1 or 2) to the new label, keeping use counts accurate.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Force re-recognition and verify the inverted insn still matches.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5505 
/* Look for loops that end in an invertible conditional branch (or a
   simple jump) back to their own start, and convert them to use the
   hardware "erepeat" looping mechanism: an erepeat insn is placed
   after the loop-top label and an erepeat label/end at the bottom.  */
static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch stays part of the loop, so it counts as
	   one insn; a simple jump will be deleted, so it counts as zero.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch looking for its own target
	   label; give up at calls, barriers, or unsafe labels.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our loop, uses this */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* An unconditional loop branch is now redundant.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* The conditional branch now exits the loop, so its
		       condition must be inverted.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5618 
5619 /* Replace a jump to a return, with a copy of the return.  GCC doesn't
5620    always do this on its own.  */
5621 
static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
    {
      /* Find the first real insn the jump jumps to, skipping notes,
	 labels and USEs.  */
      label = ret = JUMP_LABEL (insn);
      while (ret
	     && (GET_CODE (ret) == NOTE
		 || GET_CODE (ret) == CODE_LABEL
		 || GET_CODE (PATTERN (ret)) == USE))
	ret = NEXT_INSN (ret);

      if (ret)
	{
	  /* Is it a return?  */
	  ret_code = recog_memoized (ret);
	  if (ret_code == CODE_FOR_return_internal
	      || ret_code == CODE_FOR_eh_return_internal)
	    {
	      /* It is.  Replace the jump with a return, dropping the
		 label entirely if this jump was its last user.  */
	      LABEL_NUSES (label) --;
	      if (LABEL_NUSES (label) == 0)
		delete_insn (label);
	      PATTERN (insn) = copy_rtx (PATTERN (ret));
	      INSN_CODE (insn) = -1;
	    }
	}
    }
}
5656 
5657 
/* Merge pairs of adjacent "r = r + c1; r = r + c2" insns into a single
   addition of c1 + c2 when the combined constant still fits in a
   signed 16-bit immediate.  */
static void
mep_reorg_addcombine (rtx insns)
{
  rtx i, n;

  for (i = insns; i; i = NEXT_INSN (i))
    if (INSN_P (i)
	&& INSN_CODE (i) == CODE_FOR_addsi3
	&& GET_CODE (SET_DEST (PATTERN (i))) == REG
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
	&& REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
	&& GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
      {
	n = NEXT_INSN (i);
	if (INSN_P (n)
	    && INSN_CODE (n) == CODE_FOR_addsi3
	    && GET_CODE (SET_DEST (PATTERN (n))) == REG
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
	    && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
	    && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
	  {
	    int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
	    int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
	    /* NOTE(review): bounds are strict on both sides, so 32767
	       and -32768 are excluded even though they fit in 16 bits;
	       presumably deliberately conservative — confirm.  */
	    if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
		&& ic + nc < 32767
		&& ic + nc > -32768)
	      {
		/* Fold N's constant into I, then splice N out of the
		   insn chain by hand (N itself is left unlinked).  */
		XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
		NEXT_INSN (i) = NEXT_INSN (n);
		if (NEXT_INSN (i))
		  PREV_INSN (NEXT_INSN (i)) = i;
	      }
	  }
      }
}
5693 
5694 /* If this insn adjusts the stack, return the adjustment, else return
5695    zero.  */
5696 static int
add_sp_insn_p(rtx insn)5697 add_sp_insn_p (rtx insn)
5698 {
5699   rtx pat;
5700 
5701   if (! single_set (insn))
5702     return 0;
5703   pat = PATTERN (insn);
5704   if (GET_CODE (SET_DEST (pat)) != REG)
5705     return 0;
5706   if (REGNO (SET_DEST (pat)) != SP_REGNO)
5707     return 0;
5708   if (GET_CODE (SET_SRC (pat)) != PLUS)
5709     return 0;
5710   if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5711     return 0;
5712   if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5713     return 0;
5714   if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5715     return 0;
5716   return INTVAL (XEXP (SET_SRC (pat), 1));
5717 }
5718 
5719 /* Check for trivial functions that set up an unneeded stack
5720    frame.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body: the frame can be removed only if exactly one
     opposite $sp adjustment follows, $sp is otherwise never
     mentioned, and there are no calls.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  /* Bail out on a second adjustment, or if the restore does
	     not exactly undo the allocation.  */
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5773 
/* The machine-dependent reorg pass: run the MeP-specific late RTL
   transformations in dependency order.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat uses the RPB register, so only run it when profiling is
     off and, in interrupt handlers, when RPB is known to be saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5805 
5806 
5807 
5808 /*----------------------------------------------------------------------*/
5809 /* Builtins								*/
5810 /*----------------------------------------------------------------------*/
5811 
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  Filled in by mep_init_intrinsics.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.  Filled in by mep_init_intrinsics.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;
5827 
/* Associates a configuration name (matched against mep_config_string)
   with the ISA bitmask to use for it.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* The known configurations, terminated by a null entry.
   COPROC_SELECTION_TABLE, when defined, is provided by the build.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5839 
5840 /* Initialize the global intrinsics variables above.  */
5841 
static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.
     The first table entry is the default when no -mconfig match is
     found.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later entries for the same
     intrinsic win, and each entry chains to the one it displaced.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
                                 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the value computed
     just above, making that computation dead.  It looks like either a
     deliberate force-enable or leftover debugging — confirm intent
     before relying on the cmov1/cmov2 test.  */
  mep_have_core_copro_moves_p = 1;
}
5881 
5882 /* Declare all available intrinsic functions.  Called once only.  */
5883 
/* Cached tree nodes for the types used by intrinsic builtins;
   all are built by mep_init_builtins.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
5892 
/* Map the cgen register-operand type CR onto the tree type to use for
   the corresponding builtin argument or return value.  Unhandled
   operand types fall back to void.  */
static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
5919 
5920 static void
mep_init_builtins(void)5921 mep_init_builtins (void)
5922 {
5923   size_t i;
5924 
5925   if (TARGET_64BIT_CR_REGS)
5926     cp_data_bus_int_type_node = long_long_integer_type_node;
5927   else
5928     cp_data_bus_int_type_node = long_integer_type_node;
5929 
5930   opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5931   v8qi_type_node = build_vector_type (intQI_type_node, 8);
5932   v4hi_type_node = build_vector_type (intHI_type_node, 4);
5933   v2si_type_node = build_vector_type (intSI_type_node, 2);
5934   v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5935   v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5936   v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
5937 
5938   add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);
5939 
5940   add_builtin_type ("cp_vector", opaque_vector_type_node);
5941 
5942   add_builtin_type ("cp_v8qi", v8qi_type_node);
5943   add_builtin_type ("cp_v4hi", v4hi_type_node);
5944   add_builtin_type ("cp_v2si", v2si_type_node);
5945 
5946   add_builtin_type ("cp_v8uqi", v8uqi_type_node);
5947   add_builtin_type ("cp_v4uhi", v4uhi_type_node);
5948   add_builtin_type ("cp_v2usi", v2usi_type_node);
5949 
5950   /* Intrinsics like mep_cadd3 are implemented with two groups of
5951      instructions, one which uses UNSPECs and one which uses a specific
5952      rtl code such as PLUS.  Instructions in the latter group belong
5953      to GROUP_KNOWN_CODE.
5954 
5955      In such cases, the intrinsic will have two entries in the global
5956      tables above.  The unspec form is accessed using builtin functions
5957      while the specific form is accessed using the mep_* enum in
5958      mep-intrin.h.
5959 
5960      The idea is that __cop arithmetic and builtin functions have
5961      different optimization requirements.  If mep_cadd3() appears in
5962      the source code, the user will surely except gcc to use cadd3
5963      rather than a work-alike such as add3.  However, if the user
5964      just writes "a + b", where a or b are __cop variables, it is
5965      reasonable for gcc to choose a core instruction rather than
5966      cadd3 if it believes that is more optimal.  */
5967   for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5968     if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
5969 	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
5970       {
5971 	tree ret_type = void_type_node;
5972 	tree bi_type;
5973 
5974 	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
5975 	  continue;
5976 
5977 	if (cgen_insns[i].cret_p)
5978 	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
5979 
5980 	bi_type = build_function_type_list (ret_type, NULL_TREE);
5981 	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
5982 			      bi_type,
5983 			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
5984       }
5985 }
5986 
5987 /* Report the unavailablity of the given intrinsic.  */
5988 
5989 #if 1
5990 static void
mep_intrinsic_unavailable(int intrinsic)5991 mep_intrinsic_unavailable (int intrinsic)
5992 {
5993   static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
5994 
5995   if (already_reported_p[intrinsic])
5996     return;
5997 
5998   if (mep_intrinsic_insn[intrinsic] < 0)
5999     error ("coprocessor intrinsic %qs is not available in this configuration",
6000 	   cgen_intrinsics[intrinsic]);
6001   else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6002     error ("%qs is not available in VLIW functions",
6003 	   cgen_intrinsics[intrinsic]);
6004   else
6005     error ("%qs is not available in non-VLIW functions",
6006 	   cgen_intrinsics[intrinsic]);
6007 
6008   already_reported_p[intrinsic] = 1;
6009 }
6010 #endif
6011 
6012 
6013 /* See if any implementation of INTRINSIC is available to the
6014    current function.  If so, store the most general implementation
6015    in *INSN_PTR and return true.  Return false otherwise.  */
6016 
6017 static bool
mep_get_intrinsic_insn(int intrinsic ATTRIBUTE_UNUSED,const struct cgen_insn ** insn_ptr ATTRIBUTE_UNUSED)6018 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6019 {
6020   int i;
6021 
6022   i = mep_intrinsic_insn[intrinsic];
6023   while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6024     i = mep_intrinsic_chain[i];
6025 
6026   if (i >= 0)
6027     {
6028       *insn_ptr = &cgen_insns[i];
6029       return true;
6030     }
6031   return false;
6032 }
6033 
6034 
6035 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6036    If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6037    try using a work-alike instead.  In this case, the returned insn
6038    may have three operands rather than two.  */
6039 
6040 static bool
mep_get_move_insn(int intrinsic,const struct cgen_insn ** cgen_insn)6041 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6042 {
6043   size_t i;
6044 
6045   if (intrinsic == mep_cmov)
6046     {
6047       for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6048 	if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6049 	  return true;
6050       return false;
6051     }
6052   return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6053 }
6054 
6055 
6056 /* If ARG is a register operand that is the same size as MODE, convert it
6057    to MODE using a subreg.  Otherwise return ARG as-is.  */
6058 
6059 static rtx
mep_convert_arg(enum machine_mode mode,rtx arg)6060 mep_convert_arg (enum machine_mode mode, rtx arg)
6061 {
6062   if (GET_MODE (arg) != mode
6063       && register_operand (arg, VOIDmode)
6064       && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6065     return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6066   return arg;
6067 }
6068 
6069 
6070 /* Apply regnum conversions to ARG using the description given by REGNUM.
6071    Return the new argument on success and null on failure.  */
6072 
6073 static rtx
mep_convert_regnum(const struct cgen_regnum_operand * regnum,rtx arg)6074 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6075 {
6076   if (regnum->count == 0)
6077     return arg;
6078 
6079   if (GET_CODE (arg) != CONST_INT
6080       || INTVAL (arg) < 0
6081       || INTVAL (arg) >= regnum->count)
6082     return 0;
6083 
6084   return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6085 }
6086 
6087 
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the (possibly converted) argument on success, or null when
   ARG cannot be made to satisfy OPERAND's predicate.  Note the steps
   below are order-sensitive: classification, lvalue handling, mode
   punning, then register forcing.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* No legal conversion found; the caller reports the error.  */
  return 0;
}
6145 
6146 
6147 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6148    function FNNAME.  OPERAND describes the operand to which ARGNUM
6149    is mapped.  */
6150 
6151 static void
mep_incompatible_arg(const struct insn_operand_data * operand,rtx arg,int argnum,tree fnname)6152 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6153 		      int argnum, tree fnname)
6154 {
6155   size_t i;
6156 
6157   if (GET_CODE (arg) == CONST_INT)
6158     for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6159       if (operand->predicate == cgen_immediate_predicates[i].predicate)
6160 	{
6161 	  const struct cgen_immediate_predicate *predicate;
6162 	  HOST_WIDE_INT argval;
6163 
6164 	  predicate = &cgen_immediate_predicates[i];
6165 	  argval = INTVAL (arg);
6166 	  if (argval < predicate->lower || argval >= predicate->upper)
6167 	    error ("argument %d of %qE must be in the range %d...%d",
6168 		   argnum, fnname, predicate->lower, predicate->upper - 1);
6169 	  else
6170 	    error ("argument %d of %qE must be a multiple of %d",
6171 		   argnum, fnname, predicate->align);
6172 	  return;
6173 	}
6174 
6175   error ("incompatible type for argument %d of %qE", argnum, fnname);
6176 }
6177 
6178 static rtx
mep_expand_builtin(tree exp,rtx target ATTRIBUTE_UNUSED,rtx subtarget ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED,int ignore ATTRIBUTE_UNUSED)6179 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6180 		    rtx subtarget ATTRIBUTE_UNUSED,
6181 		    enum machine_mode mode ATTRIBUTE_UNUSED,
6182 		    int ignore ATTRIBUTE_UNUSED)
6183 {
6184   rtx pat, op[10], arg[10];
6185   unsigned int a;
6186   int opindex, unsigned_p[10];
6187   tree fndecl, args;
6188   unsigned int n_args;
6189   tree fnname;
6190   const struct cgen_insn *cgen_insn;
6191   const struct insn_data_d *idata;
6192   unsigned int first_arg = 0;
6193   unsigned int builtin_n_args;
6194 
6195   fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6196   fnname = DECL_NAME (fndecl);
6197 
6198   /* Find out which instruction we should emit.  Note that some coprocessor
6199      intrinsics may only be available in VLIW mode, or only in normal mode.  */
6200   if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6201     {
6202       mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6203       return NULL_RTX;
6204     }
6205   idata = &insn_data[cgen_insn->icode];
6206 
6207   builtin_n_args = cgen_insn->num_args;
6208 
6209   if (cgen_insn->cret_p)
6210     {
6211       if (cgen_insn->cret_p > 1)
6212 	builtin_n_args ++;
6213       first_arg = 1;
6214       mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6215       builtin_n_args --;
6216     }
6217 
6218   /* Evaluate each argument.  */
6219   n_args = call_expr_nargs (exp);
6220 
6221   if (n_args < builtin_n_args)
6222     {
6223       error ("too few arguments to %qE", fnname);
6224       return NULL_RTX;
6225     }
6226   if (n_args > builtin_n_args)
6227     {
6228       error ("too many arguments to %qE", fnname);
6229       return NULL_RTX;
6230     }
6231 
6232   for (a = first_arg; a < builtin_n_args + first_arg; a++)
6233     {
6234       tree value;
6235 
6236       args = CALL_EXPR_ARG (exp, a - first_arg);
6237 
6238       value = args;
6239 
6240 #if 0
6241       if (cgen_insn->regnums[a].reference_p)
6242 	{
6243 	  if (TREE_CODE (value) != ADDR_EXPR)
6244 	    {
6245 	      debug_tree(value);
6246 	      error ("argument %d of %qE must be an address", a+1, fnname);
6247 	      return NULL_RTX;
6248 	    }
6249 	  value = TREE_OPERAND (value, 0);
6250 	}
6251 #endif
6252 
6253       /* If the argument has been promoted to int, get the unpromoted
6254 	 value.  This is necessary when sub-int memory values are bound
6255 	 to reference parameters.  */
6256       if (TREE_CODE (value) == NOP_EXPR
6257 	  && TREE_TYPE (value) == integer_type_node
6258 	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6259 	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6260 	      < TYPE_PRECISION (TREE_TYPE (value))))
6261 	value = TREE_OPERAND (value, 0);
6262 
6263       /* If the argument has been promoted to double, get the unpromoted
6264 	 SFmode value.  This is necessary for FMAX support, for example.  */
6265       if (TREE_CODE (value) == NOP_EXPR
6266 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6267 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6268 	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
6269 	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6270 	value = TREE_OPERAND (value, 0);
6271 
6272       unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6273       arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6274       arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6275       if (cgen_insn->regnums[a].reference_p)
6276 	{
6277 	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6278 	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6279 
6280 	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6281 	}
6282       if (arg[a] == 0)
6283 	{
6284 	  error ("argument %d of %qE must be in the range %d...%d",
6285 		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6286 	  return NULL_RTX;
6287 	}
6288     }
6289 
6290   for (a = 0; a < first_arg; a++)
6291     {
6292       if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6293 	arg[a] = target;
6294       else
6295 	arg[a] = gen_reg_rtx (idata->operand[0].mode);
6296     }
6297 
6298   /* Convert the arguments into a form suitable for the intrinsic.
6299      Report an error if this isn't possible.  */
6300   for (opindex = 0; opindex < idata->n_operands; opindex++)
6301     {
6302       a = cgen_insn->op_mapping[opindex];
6303       op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6304 					arg[a], unsigned_p[a]);
6305       if (op[opindex] == 0)
6306 	{
6307 	  mep_incompatible_arg (&idata->operand[opindex],
6308 				arg[a], a + 1 - first_arg, fnname);
6309 	  return NULL_RTX;
6310 	}
6311     }
6312 
6313   /* Emit the instruction.  */
6314   pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6315 		       op[5], op[6], op[7], op[8], op[9]);
6316 
6317   if (GET_CODE (pat) == SET
6318       && GET_CODE (SET_DEST (pat)) == PC
6319       && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6320     emit_jump_insn (pat);
6321   else
6322     emit_insn (pat);
6323 
6324   /* Copy lvalues back to their final locations.  */
6325   for (opindex = 0; opindex < idata->n_operands; opindex++)
6326     if (idata->operand[opindex].constraint[0] == '=')
6327       {
6328 	a = cgen_insn->op_mapping[opindex];
6329 	if (a >= first_arg)
6330 	  {
6331 	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
6332 		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
6333 	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6334 						   op[opindex]));
6335 	    else
6336 	      {
6337 		/* First convert the operand to the right mode, then copy it
6338 		   into the destination.  Doing the conversion as a separate
6339 		   step (rather than using convert_move) means that we can
6340 		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6341 		   refer to the same register.  */
6342 		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6343 					       op[opindex], unsigned_p[a]);
6344 		if (!rtx_equal_p (arg[a], op[opindex]))
6345 		  emit_move_insn (arg[a], op[opindex]);
6346 	      }
6347 	  }
6348       }
6349 
6350   if (first_arg > 0 && target && target != op[0])
6351     {
6352       emit_move_insn (target, op[0]);
6353     }
6354 
6355   return target;
6356 }
6357 
6358 static bool
mep_vector_mode_supported_p(enum machine_mode mode ATTRIBUTE_UNUSED)6359 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6360 {
6361   return false;
6362 }
6363 
6364 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6365    a global register.  */
6366 
6367 static int
global_reg_mentioned_p_1(rtx * loc,void * data ATTRIBUTE_UNUSED)6368 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6369 {
6370   int regno;
6371   rtx x = *loc;
6372 
6373   if (! x)
6374     return 0;
6375 
6376   switch (GET_CODE (x))
6377     {
6378     case SUBREG:
6379       if (REG_P (SUBREG_REG (x)))
6380 	{
6381 	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6382 	      && global_regs[subreg_regno (x)])
6383 	    return 1;
6384 	  return 0;
6385 	}
6386       break;
6387 
6388     case REG:
6389       regno = REGNO (x);
6390       if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6391 	return 1;
6392       return 0;
6393 
6394     case SCRATCH:
6395     case PC:
6396     case CC0:
6397     case CONST_INT:
6398     case CONST_DOUBLE:
6399     case CONST:
6400     case LABEL_REF:
6401       return 0;
6402 
6403     case CALL:
6404       /* A non-constant call might use a global register.  */
6405       return 1;
6406 
6407     default:
6408       break;
6409     }
6410 
6411   return 0;
6412 }
6413 
6414 /* Returns nonzero if X mentions a global register.  */
6415 
6416 static int
global_reg_mentioned_p(rtx x)6417 global_reg_mentioned_p (rtx x)
6418 {
6419   if (INSN_P (x))
6420     {
6421       if (CALL_P (x))
6422 	{
6423 	  if (! RTL_CONST_OR_PURE_CALL_P (x))
6424 	    return 1;
6425 	  x = CALL_INSN_FUNCTION_USAGE (x);
6426 	  if (x == 0)
6427 	    return 0;
6428 	}
6429       else
6430 	x = PATTERN (x);
6431     }
6432 
6433   return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6434 }
6435 /* Scheduling hooks for VLIW mode.
6436 
6437    Conceptually this is very simple: we have a two-pack architecture
6438    that takes one core insn and one coprocessor insn to make up either
6439    a 32- or 64-bit instruction word (depending on the option bit set in
6440    the chip).  I.e. in VL32 mode, we can pack one 16-bit core insn and
6441    one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6442    and one 48-bit cop insn or two 32-bit core/cop insns.
6443 
6444    In practice, instruction selection will be a bear.  Consider in
6445    VL64 mode the following insns
6446 
6447 	add $1, 1
6448 	cmov $cr0, $0
6449 
6450    these cannot pack, since the add is a 16-bit core insn and cmov
6451    is a 32-bit cop insn.  However,
6452 
6453 	add3 $1, $1, 1
6454 	cmov $cr0, $0
6455 
6456    packs just fine.  For good VLIW code generation in VL64 mode, we
6457    will have to have 32-bit alternatives for many of the common core
6458    insns.  Not implemented.  */
6459 
6460 static int
mep_adjust_cost(rtx insn,rtx link,rtx dep_insn,int cost)6461 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6462 {
6463   int cost_specified;
6464 
6465   if (REG_NOTE_KIND (link) != 0)
6466     {
6467       /* See whether INSN and DEP_INSN are intrinsics that set the same
6468 	 hard register.  If so, it is more important to free up DEP_INSN
6469 	 than it is to free up INSN.
6470 
6471 	 Note that intrinsics like mep_mulr are handled differently from
6472 	 the equivalent mep.md patterns.  In mep.md, if we don't care
6473 	 about the value of $lo and $hi, the pattern will just clobber
6474 	 the registers, not set them.  Since clobbers don't count as
6475 	 output dependencies, it is often possible to reorder two mulrs,
6476 	 even after reload.
6477 
6478 	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6479 	 so any pair of mep_mulr()s will be inter-dependent.   We should
6480 	 therefore give the first mep_mulr() a higher priority.  */
6481       if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6482 	  && global_reg_mentioned_p (PATTERN (insn))
6483 	  && global_reg_mentioned_p (PATTERN (dep_insn)))
6484 	return 1;
6485 
6486       /* If the dependence is an anti or output dependence, assume it
6487 	 has no cost.  */
6488       return 0;
6489     }
6490 
6491   /* If we can't recognize the insns, we can't really do anything.  */
6492   if (recog_memoized (dep_insn) < 0)
6493     return cost;
6494 
6495   /* The latency attribute doesn't apply to MeP-h1: we use the stall
6496      attribute instead.  */
6497   if (!TARGET_H1)
6498     {
6499       cost_specified = get_attr_latency (dep_insn);
6500       if (cost_specified != 0)
6501 	return cost_specified;
6502     }
6503 
6504   return cost;
6505 }
6506 
6507 /* ??? We don't properly compute the length of a load/store insn,
6508    taking into account the addressing mode.  */
6509 
6510 static int
mep_issue_rate(void)6511 mep_issue_rate (void)
6512 {
6513   return TARGET_IVC2 ? 3 : 2;
6514 }
6515 
6516 /* Return true if function DECL was declared with the vliw attribute.  */
6517 
6518 bool
mep_vliw_function_p(tree decl)6519 mep_vliw_function_p (tree decl)
6520 {
6521   return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6522 }
6523 
6524 static rtx
mep_find_ready_insn(rtx * ready,int nready,enum attr_slot slot,int length)6525 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6526 {
6527   int i;
6528 
6529   for (i = nready - 1; i >= 0; --i)
6530     {
6531       rtx insn = ready[i];
6532       if (recog_memoized (insn) >= 0
6533 	  && get_attr_slot (insn) == slot
6534 	  && get_attr_length (insn) == length)
6535 	return insn;
6536     }
6537 
6538   return NULL_RTX;
6539 }
6540 
6541 static void
mep_move_ready_insn(rtx * ready,int nready,rtx insn)6542 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6543 {
6544   int i;
6545 
6546   for (i = 0; i < nready; ++i)
6547     if (ready[i] == insn)
6548       {
6549 	for (; i < nready - 1; ++i)
6550 	  ready[i] = ready[i + 1];
6551 	ready[i] = insn;
6552 	return;
6553       }
6554 
6555   gcc_unreachable ();
6556 }
6557 
6558 static void
mep_print_sched_insn(FILE * dump,rtx insn)6559 mep_print_sched_insn (FILE *dump, rtx insn)
6560 {
6561   const char *slots = "none";
6562   const char *name = NULL;
6563   int code;
6564   char buf[30];
6565 
6566   if (GET_CODE (PATTERN (insn)) == SET
6567       || GET_CODE (PATTERN (insn)) == PARALLEL)
6568     {
6569       switch (get_attr_slots (insn))
6570 	{
6571 	case SLOTS_CORE: slots = "core"; break;
6572 	case SLOTS_C3: slots = "c3"; break;
6573 	case SLOTS_P0: slots = "p0"; break;
6574 	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6575 	case SLOTS_P0_P1: slots = "p0,p1"; break;
6576 	case SLOTS_P0S: slots = "p0s"; break;
6577 	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6578 	case SLOTS_P1: slots = "p1"; break;
6579 	default:
6580 	  sprintf(buf, "%d", get_attr_slots (insn));
6581 	  slots = buf;
6582 	  break;
6583 	}
6584     }
6585   if (GET_CODE (PATTERN (insn)) == USE)
6586     slots = "use";
6587 
6588   code = INSN_CODE (insn);
6589   if (code >= 0)
6590     name = get_insn_name (code);
6591   if (!name)
6592     name = "{unknown}";
6593 
6594   fprintf (dump,
6595 	   "insn %4d %4d  %8s  %s\n",
6596 	   code,
6597 	   INSN_UID (insn),
6598 	   name,
6599 	   slots);
6600 }
6601 
6602 static int
mep_sched_reorder(FILE * dump ATTRIBUTE_UNUSED,int sched_verbose ATTRIBUTE_UNUSED,rtx * ready,int * pnready,int clock ATTRIBUTE_UNUSED)6603 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6604 		   int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6605 		   int *pnready, int clock ATTRIBUTE_UNUSED)
6606 {
6607   int nready = *pnready;
6608   rtx core_insn, cop_insn;
6609   int i;
6610 
6611   if (dump && sched_verbose > 1)
6612     {
6613       fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6614       for (i=0; i<nready; i++)
6615 	mep_print_sched_insn (dump, ready[i]);
6616       fprintf (dump, "\n");
6617     }
6618 
6619   if (!mep_vliw_function_p (cfun->decl))
6620     return 1;
6621   if (nready < 2)
6622     return 1;
6623 
6624   /* IVC2 uses a DFA to determine what's ready and what's not. */
6625   if (TARGET_IVC2)
6626     return nready;
6627 
6628   /* We can issue either a core or coprocessor instruction.
6629      Look for a matched pair of insns to reorder.  If we don't
6630      find any, don't second-guess the scheduler's priorities.  */
6631 
6632   if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6633       && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6634 					  TARGET_OPT_VL64 ? 6 : 2)))
6635     ;
6636   else if (TARGET_OPT_VL64
6637 	   && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6638 	   && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6639     ;
6640   else
6641     /* We didn't find a pair.  Issue the single insn at the head
6642        of the ready list.  */
6643     return 1;
6644 
6645   /* Reorder the two insns first.  */
6646   mep_move_ready_insn (ready, nready, core_insn);
6647   mep_move_ready_insn (ready, nready - 1, cop_insn);
6648   return 2;
6649 }
6650 
6651 /* A for_each_rtx callback.  Return true if *X is a register that is
6652    set by insn PREV.  */
6653 
6654 static int
mep_store_find_set(rtx * x,void * prev)6655 mep_store_find_set (rtx *x, void *prev)
6656 {
6657   return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6658 }
6659 
6660 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6661    not the containing insn.  */
6662 
6663 static bool
mep_store_data_bypass_1(rtx prev,rtx pat)6664 mep_store_data_bypass_1 (rtx prev, rtx pat)
6665 {
6666   /* Cope with intrinsics like swcpa.  */
6667   if (GET_CODE (pat) == PARALLEL)
6668     {
6669       int i;
6670 
6671       for (i = 0; i < XVECLEN (pat, 0); i++)
6672 	if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6673 	  return true;
6674 
6675       return false;
6676     }
6677 
6678   /* Check for some sort of store.  */
6679   if (GET_CODE (pat) != SET
6680       || GET_CODE (SET_DEST (pat)) != MEM)
6681     return false;
6682 
6683   /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6684      The first operand to the unspec is the store data and the other operands
6685      are used to calculate the address.  */
6686   if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6687     {
6688       rtx src;
6689       int i;
6690 
6691       src = SET_SRC (pat);
6692       for (i = 1; i < XVECLEN (src, 0); i++)
6693 	if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6694 	  return false;
6695 
6696       return true;
6697     }
6698 
6699   /* Otherwise just check that PREV doesn't modify any register mentioned
6700      in the memory destination.  */
6701   return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6702 }
6703 
6704 /* Return true if INSN is a store instruction and if the store address
6705    has no true dependence on PREV.  */
6706 
6707 bool
mep_store_data_bypass_p(rtx prev,rtx insn)6708 mep_store_data_bypass_p (rtx prev, rtx insn)
6709 {
6710   return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6711 }
6712 
6713 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p.  Return 1 if *X
6714    is a register other than LO or HI and if PREV sets *X.  */
6715 
6716 static int
mep_mul_hilo_bypass_1(rtx * x,void * prev)6717 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6718 {
6719   return (REG_P (*x)
6720 	  && REGNO (*x) != LO_REGNO
6721 	  && REGNO (*x) != HI_REGNO
6722 	  && reg_set_p (*x, (const_rtx) prev));
6723 }
6724 
6725 /* Return true if, apart from HI/LO, there are no true dependencies
6726    between multiplication instructions PREV and INSN.  */
6727 
6728 bool
mep_mul_hilo_bypass_p(rtx prev,rtx insn)6729 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6730 {
6731   rtx pat;
6732 
6733   pat = PATTERN (insn);
6734   if (GET_CODE (pat) == PARALLEL)
6735     pat = XVECEXP (pat, 0, 0);
6736   return (GET_CODE (pat) == SET
6737 	  && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6738 }
6739 
6740 /* Return true if INSN is an ldc instruction that issues to the
6741    MeP-h1 integer pipeline.  This is true for instructions that
6742    read from PSW, LP, SAR, HI and LO.  */
6743 
6744 bool
mep_ipipe_ldc_p(rtx insn)6745 mep_ipipe_ldc_p (rtx insn)
6746 {
6747   rtx pat, src;
6748 
6749   pat = PATTERN (insn);
6750 
6751   /* Cope with instrinsics that set both a hard register and its shadow.
6752      The set of the hard register comes first.  */
6753   if (GET_CODE (pat) == PARALLEL)
6754     pat = XVECEXP (pat, 0, 0);
6755 
6756   if (GET_CODE (pat) == SET)
6757     {
6758       src = SET_SRC (pat);
6759 
6760       /* Cope with intrinsics.  The first operand to the unspec is
6761 	 the source register.  */
6762       if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6763 	src = XVECEXP (src, 0, 0);
6764 
6765       if (REG_P (src))
6766 	switch (REGNO (src))
6767 	  {
6768 	  case PSW_REGNO:
6769 	  case LP_REGNO:
6770 	  case SAR_REGNO:
6771 	  case HI_REGNO:
6772 	  case LO_REGNO:
6773 	    return true;
6774 	  }
6775     }
6776   return false;
6777 }
6778 
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  The SEQUENCE
   emitted here is what later tells the assembler to pack the two
   insns into one VLIW word.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE.  The order of
     these four assignments matters: CORE is spliced in before COP and
     both are stitched to the SEQUENCE's neighbours.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  */
  INSN_LOCATION (insn) = INSN_LOCATION (INSN_LOCATION (core) ? core : cop);
  INSN_LOCATION (core) = 0;
  INSN_LOCATION (cop) = 0;

  return insn;
}
6821 
6822 /* A helper routine for ms1_insn_dependent_p called through note_stores.  */
6823 
6824 static void
mep_insn_dependent_p_1(rtx x,const_rtx pat ATTRIBUTE_UNUSED,void * data)6825 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6826 {
6827   rtx * pinsn = (rtx *) data;
6828 
6829   if (*pinsn && reg_mentioned_p (x, *pinsn))
6830     *pinsn = NULL_RTX;
6831 }
6832 
6833 /* Return true if anything in insn X is (anti,output,true) dependent on
6834    anything in insn Y.  */
6835 
6836 static int
mep_insn_dependent_p(rtx x,rtx y)6837 mep_insn_dependent_p (rtx x, rtx y)
6838 {
6839   rtx tmp;
6840 
6841   gcc_assert (INSN_P (x));
6842   gcc_assert (INSN_P (y));
6843 
6844   tmp = PATTERN (y);
6845   note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6846   if (tmp == NULL_RTX)
6847     return 1;
6848 
6849   tmp = PATTERN (x);
6850   note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6851   if (tmp == NULL_RTX)
6852     return 1;
6853 
6854   return 0;
6855 }
6856 
6857 static int
core_insn_p(rtx insn)6858 core_insn_p (rtx insn)
6859 {
6860   if (GET_CODE (PATTERN (insn)) == USE)
6861     return 0;
6862   if (get_attr_slot (insn) == SLOT_CORE)
6863     return 1;
6864   return 0;
6865 }
6866 
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	/* TImode marks the start of a (scheduler-formed) bundle.  */
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATION (insn) = INSN_LOCATION (first);

	  /* Walk backwards from INSN to FIRST, hoisting each NOTE we
	     find to just before FIRST.  PREV is saved up front because
	     the splice below rewrites NOTE's links.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here... */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here (just before FIRST).  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	/* Any other insn kind (jump, call, label) ends the bundle.  */
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Scan forward over the VOIDmode members of this bundle,
		 remembering the last core-slot insn seen.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      /* The loop exits before testing the final member.  */
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn just before INSN, the old head.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* The relocated core insn becomes the bundle head.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn can bundle with the preceding core insn only if
	     LAST is a recognized core insn of complementary length and
	     the two are not data-dependent.  Otherwise pad with a core
	     nop of the appropriate size.  */
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  /* Full-width cop insn; no core nop needed.  */
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7060 
7061 
7062 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7063    Return true on success.  This function can fail if the intrinsic
7064    is unavailable or if the operands don't satisfy their predicates.  */
7065 
7066 bool
mep_emit_intrinsic(int intrinsic,const rtx * operands)7067 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7068 {
7069   const struct cgen_insn *cgen_insn;
7070   const struct insn_data_d *idata;
7071   rtx newop[10];
7072   int i;
7073 
7074   if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7075     return false;
7076 
7077   idata = &insn_data[cgen_insn->icode];
7078   for (i = 0; i < idata->n_operands; i++)
7079     {
7080       newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7081       if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7082 	return false;
7083     }
7084 
7085   emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7086 			    newop[3], newop[4], newop[5],
7087 			    newop[6], newop[7], newop[8]));
7088 
7089   return true;
7090 }
7091 
7092 
/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: no unary intrinsic expansions are implemented,
   so callers always fall back to the default expansion path.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7104 
7105 
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: no binary intrinsic expansions are implemented,
   so callers always fall back to the default expansion path.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7122 
7123 static bool
mep_rtx_cost(rtx x,int code,int outer_code ATTRIBUTE_UNUSED,int opno ATTRIBUTE_UNUSED,int * total,bool ATTRIBUTE_UNUSED speed_t)7124 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7125 	      int opno ATTRIBUTE_UNUSED, int *total,
7126 	      bool ATTRIBUTE_UNUSED speed_t)
7127 {
7128   switch (code)
7129     {
7130     case CONST_INT:
7131       if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7132 	*total = 0;
7133       else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7134 	*total = 1;
7135       else
7136 	*total = 3;
7137       return true;
7138 
7139     case SYMBOL_REF:
7140       *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7141       return true;
7142 
7143     case MULT:
7144       *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7145 		? COSTS_N_INSNS (3)
7146 		: COSTS_N_INSNS (2));
7147       return true;
7148     }
7149   return false;
7150 }
7151 
/* Implement TARGET_ADDRESS_COST.  All legitimate addresses are treated
   as equally cheap on the MeP, so report a flat cost of 1 regardless of
   the address form, mode, or address space.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7160 
/* Implement TARGET_ASM_INIT_SECTIONS.  Create the MeP-specific output
   sections used by the section-selection hooks: near/far data and BSS
   variants, small/far read-only data, and the VLIW code sections (which
   also emit the ".vliw"/".core" mode directives for the assembler).  */

static void
mep_asm_init_sections (void)
{
  /* Writable data addressed via the TP-relative "based" addressing.  */
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  /* Small (near) zero-initialized data.  */
  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  /* Small (near) initialized data.  */
  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  /* Far (full-address-range) initialized data.  */
  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  /* Far zero-initialized data.  */
  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  /* Far read-only data.  */
  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  /* Small read-only data.  */
  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  /* VLIW-mode code; the trailing ".vliw" switches the assembler into
     VLIW instruction mode for the section.  */
  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  /* Far VLIW-mode code.  */
  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  /* Far core-mode code; ".core" switches the assembler back to core
     instruction mode.  */
  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
7205 
7206 /* Initialize the GCC target structure.  */
7207 
7208 #undef  TARGET_ASM_FUNCTION_PROLOGUE
7209 #define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
7210 #undef  TARGET_ATTRIBUTE_TABLE
7211 #define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
7212 #undef  TARGET_COMP_TYPE_ATTRIBUTES
7213 #define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
7214 #undef  TARGET_INSERT_ATTRIBUTES
7215 #define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
7216 #undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7217 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
7218 #undef  TARGET_CAN_INLINE_P
7219 #define TARGET_CAN_INLINE_P		mep_can_inline_p
7220 #undef  TARGET_SECTION_TYPE_FLAGS
7221 #define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
7222 #undef  TARGET_ASM_NAMED_SECTION
7223 #define TARGET_ASM_NAMED_SECTION	mep_asm_named_section
7224 #undef  TARGET_INIT_BUILTINS
7225 #define TARGET_INIT_BUILTINS		mep_init_builtins
7226 #undef  TARGET_EXPAND_BUILTIN
7227 #define TARGET_EXPAND_BUILTIN		mep_expand_builtin
7228 #undef  TARGET_SCHED_ADJUST_COST
7229 #define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
7230 #undef  TARGET_SCHED_ISSUE_RATE
7231 #define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
7232 #undef  TARGET_SCHED_REORDER
7233 #define TARGET_SCHED_REORDER		mep_sched_reorder
7234 #undef  TARGET_STRIP_NAME_ENCODING
7235 #define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
7236 #undef  TARGET_ASM_SELECT_SECTION
7237 #define TARGET_ASM_SELECT_SECTION	mep_select_section
7238 #undef  TARGET_ASM_UNIQUE_SECTION
7239 #define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
7240 #undef  TARGET_ENCODE_SECTION_INFO
7241 #define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info
7242 #undef  TARGET_FUNCTION_OK_FOR_SIBCALL
7243 #define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
7244 #undef  TARGET_RTX_COSTS
7245 #define TARGET_RTX_COSTS		mep_rtx_cost
7246 #undef  TARGET_ADDRESS_COST
7247 #define TARGET_ADDRESS_COST 		mep_address_cost
7248 #undef  TARGET_MACHINE_DEPENDENT_REORG
7249 #define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
7250 #undef  TARGET_SETUP_INCOMING_VARARGS
7251 #define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
7252 #undef  TARGET_PASS_BY_REFERENCE
7253 #define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
7254 #undef  TARGET_FUNCTION_ARG
7255 #define TARGET_FUNCTION_ARG             mep_function_arg
7256 #undef  TARGET_FUNCTION_ARG_ADVANCE
7257 #define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
7258 #undef  TARGET_VECTOR_MODE_SUPPORTED_P
7259 #define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
7260 #undef  TARGET_OPTION_OVERRIDE
7261 #define TARGET_OPTION_OVERRIDE		mep_option_override
7262 #undef  TARGET_ALLOCATE_INITIAL_VALUE
7263 #define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
7264 #undef  TARGET_ASM_INIT_SECTIONS
7265 #define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
7266 #undef  TARGET_RETURN_IN_MEMORY
7267 #define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
7268 #undef  TARGET_NARROW_VOLATILE_BITFIELD
7269 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7270 #undef	TARGET_EXPAND_BUILTIN_SAVEREGS
7271 #define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
7272 #undef  TARGET_BUILD_BUILTIN_VA_LIST
7273 #define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
7274 #undef  TARGET_EXPAND_BUILTIN_VA_START
7275 #define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
7276 #undef	TARGET_GIMPLIFY_VA_ARG_EXPR
7277 #define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
7278 #undef  TARGET_CAN_ELIMINATE
7279 #define TARGET_CAN_ELIMINATE            mep_can_eliminate
7280 #undef  TARGET_CONDITIONAL_REGISTER_USAGE
7281 #define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
7282 #undef  TARGET_TRAMPOLINE_INIT
7283 #define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
7284 #undef  TARGET_LEGITIMATE_CONSTANT_P
7285 #define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p
7286 
7287 struct gcc_target targetm = TARGET_INITIALIZER;
7288 
7289 #include "gt-mep.h"
7290