1 /* The Blackfin code generation auxiliary output file.
2    Copyright (C) 2005  Free Software Foundation, Inc.
3    Contributed by Analog Devices.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published
9    by the Free Software Foundation; either version 2, or (at your
10    option) any later version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT
13    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15    License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING.  If not, write to
19    the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20    Boston, MA 02110-1301, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "integrate.h"
48 #include "cgraph.h"
49 #include "langhooks.h"
50 #include "bfin-protos.h"
51 #include "tm-preds.h"
52 #include "gt-bfin.h"
53 
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.  The extern
   declarations carry the GTY(()) markers so gengtype registers these
   objects as garbage-collection roots; the definitions follow.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument-passing registers; computed once in
   output_file_start by scanning arg_regs below.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[]  =  SHORT_REGISTER_NAMES;
const char *high_reg_names[]   =  HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] =  DREGS_PAIR_NAMES;
const char *byte_reg_names[]   =  BYTE_REGISTER_NAMES;

/* Registers used for passing function arguments.  The array is
   terminated by a negative entry (see the scan in output_file_start).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
75 
/* Output an assembler directive that makes NAME globally visible.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
84 
85 static void
output_file_start(void)86 output_file_start (void)
87 {
88   FILE *file = asm_out_file;
89   int i;
90 
91   fprintf (file, ".file \"%s\";\n", input_filename);
92 
93   for (i = 0; arg_regs[i] >= 0; i++)
94     ;
95   max_arg_registers = i;	/* how many arg reg used  */
96 }
97 
98 /* Called early in the compilation to conditionally modify
99    fixed_regs/call_used_regs.  */
100 
101 void
conditional_register_usage(void)102 conditional_register_usage (void)
103 {
104   /* initialize condition code flag register rtx */
105   bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
106   bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
107 }
108 
109 /* Examine machine-dependent attributes of function type FUNTYPE and return its
110    type.  See the definition of E_FUNKIND.  */
111 
funkind(tree funtype)112 static e_funkind funkind (tree funtype)
113 {
114   tree attrs = TYPE_ATTRIBUTES (funtype);
115   if (lookup_attribute ("interrupt_handler", attrs))
116     return INTERRUPT_HANDLER;
117   else if (lookup_attribute ("exception_handler", attrs))
118     return EXCPT_HANDLER;
119   else if (lookup_attribute ("nmi_handler", attrs))
120     return NMI_HANDLER;
121   else
122     return SUBROUTINE;
123 }
124 
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant-pool references are local to the output file and need no
	 GOT indirection.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    {
	      gcc_assert (!no_new_pseudos);
	      reg = gen_reg_rtx (Pmode);
	    }

	  if (flag_pic == 2)
	    {
	      /* -fPIC: build the full 32-bit GOT offset in REG (high then
		 low half), add the PIC register, and load through it.  */
	      emit_insn (gen_movsi_high_pic (reg, addr));
	      emit_insn (gen_movsi_low_pic (reg, reg, addr));
	      emit_insn (gen_addsi3 (reg, reg, picreg));
	      new = gen_const_mem (Pmode, reg);
	    }
	  else
	    {
	      /* -fpic: small GOT; load through PICREG + unspec offset.  */
	      rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					UNSPEC_MOVE_PIC);
	      new = gen_const_mem (Pmode,
				   gen_rtx_PLUS (Pmode, picreg, tmp));
	    }
	  emit_move_insn (reg, new);
	}
      if (picreg == pic_offset_table_rtx)
	current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      /* Strip a CONST wrapper; a PLUS must be underneath it.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* An address already based on the PIC register is fine as-is.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize both halves of the PLUS recursively.  Reuse REG for
	 the second operand only if the first did not already claim it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Reassociate (base + (x + const)) into ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
211 
212 /* Stack frame layout. */
213 
214 /* Compute the number of DREGS to save with a push_multiple operation.
215    This could include registers that aren't modified in the function,
216    since push_multiple only takes a range of registers.
217    If IS_INTHANDLER, then everything that is live must be saved, even
218    if normally call-clobbered.  */
219 
220 static int
n_dregs_to_save(bool is_inthandler)221 n_dregs_to_save (bool is_inthandler)
222 {
223   unsigned i;
224 
225   for (i = REG_R0; i <= REG_R7; i++)
226     {
227       if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
228 	return REG_R7 - i + 1;
229 
230       if (current_function_calls_eh_return)
231 	{
232 	  unsigned j;
233 	  for (j = 0; ; j++)
234 	    {
235 	      unsigned test = EH_RETURN_DATA_REGNO (j);
236 	      if (test == INVALID_REGNUM)
237 		break;
238 	      if (test == i)
239 		return REG_R7 - i + 1;
240 	    }
241 	}
242 
243     }
244   return 0;
245 }
246 
247 /* Like n_dregs_to_save, but compute number of PREGS to save.  */
248 
249 static int
n_pregs_to_save(bool is_inthandler)250 n_pregs_to_save (bool is_inthandler)
251 {
252   unsigned i;
253 
254   for (i = REG_P0; i <= REG_P5; i++)
255     if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
256 	|| (i == PIC_OFFSET_TABLE_REGNUM
257 	    && (current_function_uses_pic_offset_table
258 		|| (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
259       return REG_P5 - i + 1;
260   return 0;
261 }
262 
263 /* Determine if we are going to save the frame pointer in the prologue.  */
264 
265 static bool
must_save_fp_p(void)266 must_save_fp_p (void)
267 {
268   return frame_pointer_needed || regs_ever_live[REG_FP];
269 }
270 
271 static bool
stack_frame_needed_p(void)272 stack_frame_needed_p (void)
273 {
274   /* EH return puts a new return address into the frame using an
275      address relative to the frame pointer.  */
276   if (current_function_calls_eh_return)
277     return true;
278   return frame_pointer_needed;
279 }
280 
281 /* Emit code to save registers in the prologue.  SAVEALL is nonzero if we
282    must save all registers; this is used for interrupt handlers.
283    SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
284    this for an interrupt (or exception) handler.  */
285 
286 static void
expand_prologue_reg_save(rtx spreg,int saveall,bool is_inthandler)287 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
288 {
289   int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
290   int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
291   int dregno = REG_R7 + 1 - ndregs;
292   int pregno = REG_P5 + 1 - npregs;
293   int total = ndregs + npregs;
294   int i;
295   rtx pat, insn, val;
296 
297   if (total == 0)
298     return;
299 
300   val = GEN_INT (-total * 4);
301   pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
302   XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
303 					UNSPEC_PUSH_MULTIPLE);
304   XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
305 					     gen_rtx_PLUS (Pmode, spreg,
306 							   val));
307   RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
308   for (i = 0; i < total; i++)
309     {
310       rtx memref = gen_rtx_MEM (word_mode,
311 				gen_rtx_PLUS (Pmode, spreg,
312 					      GEN_INT (- i * 4 - 4)));
313       rtx subpat;
314       if (ndregs > 0)
315 	{
316 	  subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
317 							       dregno++));
318 	  ndregs--;
319 	}
320       else
321 	{
322 	  subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
323 							       pregno++));
324 	  npregs++;
325 	}
326       XVECEXP (pat, 0, i + 1) = subpat;
327       RTX_FRAME_RELATED_P (subpat) = 1;
328     }
329   insn = emit_insn (pat);
330   RTX_FRAME_RELATED_P (insn) = 1;
331 }
332 
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  /* Build a PARALLEL: element 0 pops the whole block by adding
     4 * TOTAL to SP; elements 1..TOTAL reload the registers.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
				     gen_rtx_PLUS (Pmode, spreg,
						   GEN_INT (total * 4)));

  /* Restore in the reverse of the push order used by
     expand_prologue_reg_save: P registers counting down from REG_P5,
     then D registers counting down from REG_R7.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* The i-th restored register lives at SP + i*4; the first one is
	 at the stack pointer itself.  */
      rtx addr = (i > 0
		  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* When the last P register has been emitted, switch over to the
	 D-register range.  */
      if (npregs > 0)
	{
	  if (--npregs == 0)
	    regno = REG_R7 + 1;
	}
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
381 
382 /* Perform any needed actions needed for a function that is receiving a
383    variable number of arguments.
384 
385    CUM is as above.
386 
387    MODE and TYPE are the mode and type of the current parameter.
388 
389    PRETEND_SIZE is a variable that should be set to the amount of stack
390    that must be pushed by the prolog to pretend that our caller pushed
391    it.
392 
393    Normally, this macro will push all remaining incoming registers on the
394    stack and set PRETEND_SIZE to the length of the registers pushed.
395 
396    Blackfin specific :
397    - VDSP C compiler manual (our ABI) says that a variable args function
398      should save the R0, R1 and R2 registers in the stack.
399    - The caller will always leave space on the stack for the
400      arguments that are passed in registers, so we dont have
401      to leave any extra space.
402    - now, the vastart pointer can access all arguments from the stack.  */
403 
404 static void
setup_incoming_varargs(CUMULATIVE_ARGS * cum,enum machine_mode mode ATTRIBUTE_UNUSED,tree type ATTRIBUTE_UNUSED,int * pretend_size,int no_rtl)405 setup_incoming_varargs (CUMULATIVE_ARGS *cum,
406 			enum machine_mode mode ATTRIBUTE_UNUSED,
407 			tree type ATTRIBUTE_UNUSED, int *pretend_size,
408 			int no_rtl)
409 {
410   rtx mem;
411   int i;
412 
413   if (no_rtl)
414     return;
415 
416   /* The move for named arguments will be generated automatically by the
417      compiler.  We need to generate the move rtx for the unnamed arguments
418      if they are in the first 3 words.  We assume at least 1 named argument
419      exists, so we never generate [ARGP] = R0 here.  */
420 
421   for (i = cum->words + 1; i < max_arg_registers; i++)
422     {
423       mem = gen_rtx_MEM (Pmode,
424 			 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
425       emit_move_insn (mem, gen_rtx_REG (Pmode, i));
426     }
427 
428   *pretend_size = 0;
429 }
430 
431 /* Value should be nonzero if functions must have frame pointers.
432    Zero means the frame pointer need not be set up (and parms may
433    be accessed via the stack pointer) in functions that seem suitable.  */
434 
435 int
bfin_frame_pointer_required(void)436 bfin_frame_pointer_required (void)
437 {
438   e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
439 
440   if (fkind != SUBROUTINE)
441     return 1;
442 
443   /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
444      so we have to override it for non-leaf functions.  */
445   if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
446     return 1;
447 
448   return 0;
449 }
450 
/* Return the number of registers pushed during the prologue.  This must
   agree with what bfin_expand_prologue / the interrupt-handler prologue
   actually emit, since bfin_initial_elimination_offset relies on it.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* "saveall" functions and non-leaf handlers save everything.  */
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case; it stores two words
       (cf. the -8 adjustment in emit_link_insn).  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (! current_function_is_leaf)
	n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;

      /* Count the extra registers (between REG_P7 and REG_CC) pushed by
	 expand_interrupt_handler_prologue; the accumulators A0 and A1
	 occupy two words each.  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
	if (all
	    || regs_ever_live[i]
	    || (!leaf_function_p () && call_used_regs[i]))
	  n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
495 
496 /* Return the offset between two registers, one to be eliminated, and the other
497    its replacement, at the start of a routine.  */
498 
499 HOST_WIDE_INT
bfin_initial_elimination_offset(int from,int to)500 bfin_initial_elimination_offset (int from, int to)
501 {
502   HOST_WIDE_INT offset = 0;
503 
504   if (from == ARG_POINTER_REGNUM)
505     offset = n_regs_saved_by_prologue () * 4;
506 
507   if (to == STACK_POINTER_REGNUM)
508     {
509       if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
510 	offset += current_function_outgoing_args_size;
511       else if (current_function_outgoing_args_size)
512 	offset += FIXED_STACK_AREA;
513 
514       offset += get_frame_size ();
515     }
516 
517   return offset;
518 }
519 
520 /* Emit code to load a constant CONSTANT into register REG; setting
521    RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
522    Make sure that the insns we generate need not be split.  */
523 
524 static void
frame_related_constant_load(rtx reg,HOST_WIDE_INT constant,bool related)525 frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
526 {
527   rtx insn;
528   rtx cst = GEN_INT (constant);
529 
530   if (constant >= -32768 && constant < 65536)
531     insn = emit_move_insn (reg, cst);
532   else
533     {
534       /* We don't call split_load_immediate here, since dwarf2out.c can get
535 	 confused about some of the more clever sequences it can generate.  */
536       insn = emit_insn (gen_movsi_high (reg, cst));
537       if (related)
538 	RTX_FRAME_RELATED_P (insn) = 1;
539       insn = emit_insn (gen_movsi_low (reg, reg, cst));
540     }
541   if (related)
542     RTX_FRAME_RELATED_P (insn) = 1;
543 }
544 
545 /* Generate efficient code to add a value to the frame pointer.  We
546    can use P1 as a scratch register.  Set RTX_FRAME_RELATED_P on the
547    generated insns if FRAME is nonzero.  */
548 
549 static void
add_to_sp(rtx spreg,HOST_WIDE_INT value,int frame)550 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
551 {
552   if (value == 0)
553     return;
554 
555   /* Choose whether to use a sequence using a temporary register, or
556      a sequence with multiple adds.  We can add a signed 7 bit value
557      in one instruction.  */
558   if (value > 120 || value < -120)
559     {
560       rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
561       rtx insn;
562 
563       if (frame)
564 	frame_related_constant_load (tmpreg, value, TRUE);
565       else
566 	{
567 	  insn = emit_move_insn (tmpreg, GEN_INT (value));
568 	  if (frame)
569 	    RTX_FRAME_RELATED_P (insn) = 1;
570 	}
571 
572       insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
573       if (frame)
574 	RTX_FRAME_RELATED_P (insn) = 1;
575     }
576   else
577     do
578       {
579 	int size = value;
580 	rtx insn;
581 
582 	if (size > 60)
583 	  size = 60;
584 	else if (size < -60)
585 	  /* We could use -62, but that would leave the stack unaligned, so
586 	     it's no good.  */
587 	  size = -60;
588 
589 	insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
590 	if (frame)
591 	  RTX_FRAME_RELATED_P (insn) = 1;
592 	value -= size;
593       }
594     while (value != 0);
595 }
596 
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* Clamp to the largest frame a single LINK insn can allocate.
     NOTE(review): 262140 == 0x3FFFC; confirm against the LINK
     instruction's immediate range in the ISA manual.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra -8 presumably covers the
     two words the link itself stores (n_regs_saved_by_prologue counts 2
     for the LINK case) -- verify against the pattern in bfin.md.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the link PARALLEL as frame-related so the
     unwind info describes each store.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
635 
636 /* Return the number of bytes we must reserve for outgoing arguments
637    in the current function's stack frame.  */
638 
639 static HOST_WIDE_INT
arg_area_size(void)640 arg_area_size (void)
641 {
642   if (current_function_outgoing_args_size)
643     {
644       if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
645 	return current_function_outgoing_args_size;
646       else
647 	return FIXED_STACK_AREA;
648     }
649   return 0;
650 }
651 
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Use a real LINK insn when a full frame is required; otherwise push
     RETS and/or FP individually (skipping whichever isn't needed) and
     adjust SP directly.  */
  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (! current_function_is_leaf)
	{
	  /* Non-leaf function: save the RETS (return address) register
	     with a predecrement push.  */
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  /* Save the frame pointer likewise; do_unlink pops these in the
	     reverse order.  */
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_sp (spreg, -frame_size, 1);
    }
}
685 
/* Like do_link, but used for epilogues to deallocate the stack frame.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      /* Reverse of do_link's non-LINK path: deallocate the frame, then
	 pop FP and RETS in the opposite order of their pushes.  */
      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* NOTE(review): the USE presumably keeps later passes from
	     deleting the restore as dead -- cf. the MEM_VOLATILE_P trick
	     in the interrupt-handler epilogue.  */
	  emit_insn (gen_rtx_USE (VOIDmode, fpreg));
	}
      if (! current_function_is_leaf)
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
	}
    }
}
713 
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* With the "kspisusp" attribute, start by loading the stack pointer
     from the USP register.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Push ASTAT first; the epilogue pops it last.  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Push the remaining hard registers between REG_P7 and REG_CC that
     need saving; A0 and A1 are pushed in PDImode (two words each).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* For "nesting" handlers, also push the event return address register
     appropriate for this handler kind (RETX/RETN/RETI).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Set up registers for a possible exception dispatch:
	 R0 = SEQSTAT with its low 26 bits cleared (shift right then left
	 by 26 keeps only the top 6 bits -- NOTE(review): confirm which
	 SEQSTAT field this isolates against the hardware manual),
	 R1 = SP, R2 = FP + 8.  Each insn gets a REG_MAYBE_DEAD note so
	 it can be deleted if the values turn out to be unused.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
    }
}
804 
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* Pop in the reverse of the prologue's push order: first the nesting
     return-address register, if it was pushed...  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  /* ...then the registers above REG_P7, descending; A0 and A1 were
     pushed in PDImode and are popped the same way.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  /* ASTAT was pushed first, so it comes off last.  */
  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
863 
864 /* Used while emitting the prologue to generate code to load the correct value
865    into the PIC register, which is passed in DEST.  */
866 
867 static rtx
bfin_load_pic_reg(rtx dest)868 bfin_load_pic_reg (rtx dest)
869 {
870   struct cgraph_local_info *i = NULL;
871   rtx addr, insn;
872 
873   if (flag_unit_at_a_time)
874     i = cgraph_local_info (current_function_decl);
875 
876   /* Functions local to the translation unit don't need to reload the
877      pic reg, since the caller always passes a usable one.  */
878   if (i && i->local)
879     return pic_offset_table_rtx;
880 
881   if (bfin_lib_id_given)
882     addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
883   else
884     addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
885 			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
886 					 UNSPEC_LIBRARY_OFFSET));
887   insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
888   REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
889   return dest;
890 }
891 
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  rtx insn;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;

  /* Interrupt/exception/NMI handlers use a dedicated prologue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind);
      return;
    }

  /* -fstack-limit: compare SP against the limit (plus this function's
     total frame needs) and trap if it would underflow.  */
  if (current_function_limit_stack)
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = stack_limit_rtx;

      /* A symbolic limit must first be materialized in a register;
	 under -mid-shared-library that requires a PIC load.  */
      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx val;
	      pic_reg_loaded = bfin_load_pic_reg (p2reg);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg,
					    pic_reg_loaded);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (stack_limit_rtx, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
    }
  expand_prologue_reg_save (spreg, 0, false);

  do_link (spreg, frame_size, false);

  /* Reload the PIC register for shared-library code unless this function
     neither uses the PIC offset table nor makes calls.  */
  if (TARGET_ID_SHARED_LIBRARY
      && (current_function_uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
950 
951 /* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
952    if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
953    eh_return pattern.  */
954 
void
bfin_expand_epilogue (int need_return, int eh_return)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  /* Interrupt, exception and NMI handlers restore state differently;
     delegate to the specialized epilogue expander.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind);
      return;
    }

  /* Undo the stack frame created by the prologue's do_link.  */
  do_unlink (spreg, get_frame_size (), false);

  expand_epilogue_reg_restore (spreg, false, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* NOTE(review): for eh_return, P2 appears to hold an extra stack
     adjustment that is applied before returning — confirm against the
     eh_return expander/pattern in bfin.md.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
}
980 
981 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
982 
983 int
bfin_hard_regno_rename_ok(unsigned int old_reg ATTRIBUTE_UNUSED,unsigned int new_reg)984 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
985 			   unsigned int new_reg)
986 {
987   /* Interrupt functions can only use registers that have already been
988      saved by the prologue, even if they would normally be
989      call-clobbered.  */
990 
991   if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
992       && !regs_ever_live[new_reg])
993     return 0;
994 
995   return 1;
996 }
997 
998 /* Return the value of the return address for the frame COUNT steps up
999    from the current frame, after the prologue.
1000    We punt for everything but the current frame by returning const0_rtx.  */
1001 
1002 rtx
bfin_return_addr_rtx(int count)1003 bfin_return_addr_rtx (int count)
1004 {
1005   if (count != 0)
1006     return const0_rtx;
1007 
1008   return get_hard_reg_initial_val (Pmode, REG_RETS);
1009 }
1010 
1011 /* Try machine-dependent ways of modifying an illegitimate address X
1012    to be legitimate.  If we find one, return the new, valid address,
1013    otherwise return NULL_RTX.
1014 
1015    OLDX is the address as it was before break_out_memory_refs was called.
1016    In some cases it is useful to look at this to decide what needs to be done.
1017 
1018    MODE is the mode of the memory reference.  */
1019 
rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* The Blackfin has no machine-specific address legitimization;
     returning NULL_RTX tells the caller to use the generic code.  */
  return NULL_RTX;
}
1026 
1027 static rtx
bfin_delegitimize_address(rtx orig_x)1028 bfin_delegitimize_address (rtx orig_x)
1029 {
1030   rtx x = orig_x, y;
1031 
1032   if (GET_CODE (x) != MEM)
1033     return orig_x;
1034 
1035   x = XEXP (x, 0);
1036   if (GET_CODE (x) == PLUS
1037       && GET_CODE (XEXP (x, 1)) == UNSPEC
1038       && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1039       && GET_CODE (XEXP (x, 0)) == REG
1040       && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1041     return XVECEXP (XEXP (x, 1), 0, 0);
1042 
1043   return orig_x;
1044 }
1045 
1046 /* This predicate is used to compute the length of a load/store insn.
1047    OP is a MEM rtx, we return nonzero if its addressing mode requires a
1048    32 bit instruction.  */
1049 
int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* The incoming MODE parameter is ignored; the MEM's own mode decides
     the allowed short-offset range below.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Plain register and autoincrement/decrement addressing always fit
     in a 16-bit instruction.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  /* assumes the PLUS is canonical (reg + const_int) — TODO confirm that
     only legitimate addresses reach this predicate.  */
  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
1083 
1084 /* Return cost of the memory address ADDR.
1085    All addressing modes are equally cheap on the Blackfin.  */
1086 
static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  /* Every addressing mode costs the same on this target, so the
     argument is ignored.  */
  return 1;
}
1092 
1093 /* Subroutine of print_operand; used to print a memory reference X to FILE.  */
1094 
void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      /* Base plus offset prints as "base+offset".  */
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      /* Predecrement: "--reg".  */
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      /* Postincrement: "reg++".  */
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      /* Postdecrement: "reg--".  */
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      /* Anything else (register, constant, symbol) prints as an
	 ordinary operand; a nested MEM is never a valid address here.  */
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
1125 
1126 /* Adding intp DImode support by Tony
1127  * -- Q: (low  word)
1128  * -- R: (high word)
1129  */
1130 
void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Print the condition-code suffix for the comparison in X.
	 NOTE(review): the unsigned codes (GTU/LTU/GEU/LEU) print the
	 same suffixes as the signed ones — presumably signedness was
	 already folded into the compare insn; confirm against bfin.md.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      /* Like 'j' but with the condition inverted.  */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* Register modifiers: 'h' low half, 'd' high half of a data
	     register; 'w'/'x' the .w/.x part of accumulator A0/A1;
	     'D' a D-register pair name; 'H' the second word of a
	     DImode/DFmode register pair; 'T' a byte register name.  */
	  if (code == 'h')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      /*fprintf (file, "\n%d\n ", REGNO (x));*/
	      break;
	    }
	  else if (code == 'd')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  /* Memory operands are bracketed: "[address]".  */
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'X')
	    /* 'X': bit position of a single-one-bit mask.  */
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    /* 'Y': bit position of a single-zero-bit mask.  */
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      /* A GOT-relative symbol reference.  */
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1314 
1315 /* Argument support functions.  */
1316 
1317 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1318    for a call to a function whose data type is FNTYPE.
1319    For a library call, FNTYPE is 0.
1320    VDSP C Compiler manual, our ABI says that
1321    first 3 words of arguments will use R0, R1 and R2.
1322 */
1323 
1324 void
init_cumulative_args(CUMULATIVE_ARGS * cum,tree fntype,rtx libname ATTRIBUTE_UNUSED)1325 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1326 		      rtx libname ATTRIBUTE_UNUSED)
1327 {
1328   static CUMULATIVE_ARGS zero_cum;
1329 
1330   *cum = zero_cum;
1331 
1332   /* Set up the number of registers to use for passing arguments.  */
1333 
1334   cum->nregs = max_arg_registers;
1335   cum->arg_regs = arg_regs;
1336 
1337   cum->call_cookie = CALL_NORMAL;
1338   /* Check for a longcall attribute.  */
1339   if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1340     cum->call_cookie |= CALL_SHORT;
1341   else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1342     cum->call_cookie |= CALL_LONG;
1343 
1344   return;
1345 }
1346 
1347 /* Update the data in CUM to advance over an argument
1348    of mode MODE and data type TYPE.
1349    (TYPE is null for libcalls where that information may not be available.)  */
1350 
1351 void
function_arg_advance(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named ATTRIBUTE_UNUSED)1352 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1353 		      int named ATTRIBUTE_UNUSED)
1354 {
1355   int count, bytes, words;
1356 
1357   bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1358   words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1359 
1360   cum->words += words;
1361   cum->nregs -= words;
1362 
1363   if (cum->nregs <= 0)
1364     {
1365       cum->nregs = 0;
1366       cum->arg_regs = NULL;
1367     }
1368   else
1369     {
1370       for (count = 1; count <= words; count++)
1371         cum->arg_regs++;
1372     }
1373 
1374   return;
1375 }
1376 
1377 /* Define where to put the arguments to a function.
1378    Value is zero to push the argument on the stack,
1379    or a hard register in which to store the argument.
1380 
1381    MODE is the argument's machine mode.
1382    TYPE is the data type of the argument (as a tree).
1383     This is null for libcalls where that information may
1384     not be available.
1385    CUM is a variable of type CUMULATIVE_ARGS which gives info about
1386     the preceding args and about the function being called.
1387    NAMED is nonzero if this argument is a named parameter
1388     (otherwise it is an extra parameter matching an ellipsis).  */
1389 
1390 struct rtx_def *
function_arg(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type,int named ATTRIBUTE_UNUSED)1391 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1392 	      int named ATTRIBUTE_UNUSED)
1393 {
1394   int bytes
1395     = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1396 
1397   if (mode == VOIDmode)
1398     /* Compute operand 2 of the call insn.  */
1399     return GEN_INT (cum->call_cookie);
1400 
1401   if (bytes == -1)
1402     return NULL_RTX;
1403 
1404   if (cum->nregs)
1405     return gen_rtx_REG (mode, *(cum->arg_regs));
1406 
1407   return NULL_RTX;
1408 }
1409 
1410 /* For an arg passed partly in registers and partly in memory,
1411    this is the number of bytes passed in registers.
1412    For args passed entirely in registers or entirely in memory, zero.
1413 
1414    Refer VDSP C Compiler manual, our ABI.
1415    First 3 words are in registers. So, if a an argument is larger
1416    than the registers available, it will span the register and
1417    stack.   */
1418 
1419 static int
bfin_arg_partial_bytes(CUMULATIVE_ARGS * cum,enum machine_mode mode,tree type ATTRIBUTE_UNUSED,bool named ATTRIBUTE_UNUSED)1420 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1421 			tree type ATTRIBUTE_UNUSED,
1422 			bool named ATTRIBUTE_UNUSED)
1423 {
1424   int bytes
1425     = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1426   int bytes_left = cum->nregs * UNITS_PER_WORD;
1427 
1428   if (bytes == -1)
1429     return 0;
1430 
1431   if (bytes_left == 0)
1432     return 0;
1433   if (bytes > bytes_left)
1434     return bytes_left;
1435   return 0;
1436 }
1437 
1438 /* Variable sized types are passed by reference.  */
1439 
1440 static bool
bfin_pass_by_reference(CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED,tree type,bool named ATTRIBUTE_UNUSED)1441 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1442 			enum machine_mode mode ATTRIBUTE_UNUSED,
1443 			tree type, bool named ATTRIBUTE_UNUSED)
1444 {
1445   return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1446 }
1447 
1448 /* Decide whether a type should be returned in memory (true)
1449    or in a register (false).  This is called by the macro
1450    RETURN_IN_MEMORY.  */
1451 
1452 int
bfin_return_in_memory(tree type)1453 bfin_return_in_memory (tree type)
1454 {
1455   int size = int_size_in_bytes (type);
1456   return size > 2 * UNITS_PER_WORD || size == -1;
1457 }
1458 
1459 /* Register in which address to store a structure value
1460    is passed to a function.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  /* Both incoming and outgoing structure-value addresses travel in P0.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
1467 
1468 /* Return true when register may be used to pass function parameters.  */
1469 
1470 bool
function_arg_regno_p(int n)1471 function_arg_regno_p (int n)
1472 {
1473   int i;
1474   for (i = 0; arg_regs[i] != -1; i++)
1475     if (n == arg_regs[i])
1476       return true;
1477   return false;
1478 }
1479 
1480 /* Returns 1 if OP contains a symbol reference */
1481 
1482 int
symbolic_reference_mentioned_p(rtx op)1483 symbolic_reference_mentioned_p (rtx op)
1484 {
1485   register const char *fmt;
1486   register int i;
1487 
1488   if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1489     return 1;
1490 
1491   fmt = GET_RTX_FORMAT (GET_CODE (op));
1492   for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1493     {
1494       if (fmt[i] == 'E')
1495 	{
1496 	  register int j;
1497 
1498 	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1499 	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1500 	      return 1;
1501 	}
1502 
1503       else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1504 	return 1;
1505     }
1506 
1507   return 0;
1508 }
1509 
1510 /* Decide whether we can make a sibling call to a function.  DECL is the
1511    declaration of the function being targeted by the call and EXP is the
1512    CALL_EXPR representing the call.  */
1513 
1514 static bool
bfin_function_ok_for_sibcall(tree decl ATTRIBUTE_UNUSED,tree exp ATTRIBUTE_UNUSED)1515 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1516 			      tree exp ATTRIBUTE_UNUSED)
1517 {
1518   e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1519   return fkind == SUBROUTINE;
1520 }
1521 
1522 /* Emit RTL insns to initialize the variable parts of a trampoline at
1523    TRAMP. FNADDR is an RTX for the address of the function's pure
1524    code.  CXT is an RTX for the static chain value for the function.  */
1525 
1526 void
initialize_trampoline(tramp,fnaddr,cxt)1527 initialize_trampoline (tramp, fnaddr, cxt)
1528      rtx tramp, fnaddr, cxt;
1529 {
1530   rtx t1 = copy_to_reg (fnaddr);
1531   rtx t2 = copy_to_reg (cxt);
1532   rtx addr;
1533 
1534   addr = memory_address (Pmode, plus_constant (tramp, 2));
1535   emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1536   emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1537   addr = memory_address (Pmode, plus_constant (tramp, 6));
1538   emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1539 
1540   addr = memory_address (Pmode, plus_constant (tramp, 10));
1541   emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1542   emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1543   addr = memory_address (Pmode, plus_constant (tramp, 14));
1544   emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1545 }
1546 
1547 /* Emit insns to move operands[1] into operands[0].  */
1548 
void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we may not create new pseudos, so reuse the
     destination as the scratch register.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  /* A symbolic store needs its source forced into a register first;
     otherwise rewrite the source as a PIC address (TEMP is only used
     on this branch).  */
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  pic_offset_table_rtx);
}
1560 
1561 /* Expand a move operation in mode MODE.  The operands are in OPERANDS.  */
1562 
1563 void
expand_move(rtx * operands,enum machine_mode mode)1564 expand_move (rtx *operands, enum machine_mode mode)
1565 {
1566   if (flag_pic && SYMBOLIC_CONST (operands[1]))
1567     emit_pic_move (operands, mode);
1568 
1569   /* Don't generate memory->memory or constant->memory moves, go through a
1570      register */
1571   else if ((reload_in_progress | reload_completed) == 0
1572 	   && GET_CODE (operands[0]) == MEM
1573     	   && GET_CODE (operands[1]) != REG)
1574     operands[1] = force_reg (mode, operands[1]);
1575 }
1576 
1577 /* Split one or more DImode RTL references into pairs of SImode
1578    references.  The RTL can be REG, offsettable MEM, integer constant, or
1579    CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
1580    split and "num" is its length.  lo_half and hi_half are output arrays
1581    that parallel "operands".  */
1582 
1583 void
split_di(rtx operands[],int num,rtx lo_half[],rtx hi_half[])1584 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1585 {
1586   while (num--)
1587     {
1588       rtx op = operands[num];
1589 
1590       /* simplify_subreg refuse to split volatile memory addresses,
1591          but we still have to handle it.  */
1592       if (GET_CODE (op) == MEM)
1593 	{
1594 	  lo_half[num] = adjust_address (op, SImode, 0);
1595 	  hi_half[num] = adjust_address (op, SImode, 4);
1596 	}
1597       else
1598 	{
1599 	  lo_half[num] = simplify_gen_subreg (SImode, op,
1600 					      GET_MODE (op) == VOIDmode
1601 					      ? DImode : GET_MODE (op), 0);
1602 	  hi_half[num] = simplify_gen_subreg (SImode, op,
1603 					      GET_MODE (op) == VOIDmode
1604 					      ? DImode : GET_MODE (op), 4);
1605 	}
1606     }
1607 }
1608 
1609 bool
bfin_longcall_p(rtx op,int call_cookie)1610 bfin_longcall_p (rtx op, int call_cookie)
1611 {
1612   gcc_assert (GET_CODE (op) == SYMBOL_REF);
1613   if (call_cookie & CALL_SHORT)
1614     return 0;
1615   if (call_cookie & CALL_LONG)
1616     return 1;
1617   if (TARGET_LONG_CALLS)
1618     return 1;
1619   return 0;
1620 }
1621 
1622 /* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
1623    COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1624    SIBCALL is nonzero if this is a sibling call.  */
1625 
void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* The call pattern is (parallel [call, (use cookie)]) with an extra
     (return) element for sibcalls.  */
  rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  /* Force the callee into a register when it cannot be called
     directly: it is neither a valid register nor a symbol, or it is a
     symbol that must go through a register (PIC, or a long call).  */
  if ((!register_no_elim_operand (callee, Pmode)
       && GET_CODE (callee) != SYMBOL_REF)
      || (GET_CODE (callee) == SYMBOL_REF
	  && (flag_pic
	      || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  /* If a value is returned, wrap the call in a SET of the result.  */
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  XVECEXP (pat, 0, 0) = call;
  XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
1665 
1666 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */
1667 
int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class class = REGNO_REG_CLASS (regno);

  /* CCmode values never live in a general register.  */
  if (mode == CCmode)
    return 0;

  /* Vector values fit only in data registers.  */
  if (mode == V2HImode)
    return D_REGNO_P (regno);
  /* The CC register holds only BImode.  */
  if (class == CCREGS)
    return mode == BImode;
  /* PDImode values fit only in the accumulators A0/A1.  */
  if (mode == PDImode)
    return regno == REG_A0 || regno == REG_A1;
  /* Prologue/epilogue helper registers may additionally hold SImode.  */
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
1689 
1690 /* Implements target hook vector_mode_supported_p.  */
1691 
1692 static bool
bfin_vector_mode_supported_p(enum machine_mode mode)1693 bfin_vector_mode_supported_p (enum machine_mode mode)
1694 {
1695   return mode == V2HImode;
1696 }
1697 
1698 /* Return the cost of moving data from a register in class CLASS1 to
1699    one in class CLASS2.  A cost of 2 is the default.  */
1700 
1701 int
bfin_register_move_cost(enum machine_mode mode ATTRIBUTE_UNUSED,enum reg_class class1,enum reg_class class2)1702 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1703 			 enum reg_class class1, enum reg_class class2)
1704 {
1705   /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
1706   if (optimize_size)
1707     return 2;
1708 
1709   /* There are some stalls involved when moving from a DREG to a different
1710      class reg, and using the value in one of the following instructions.
1711      Attempt to model this by slightly discouraging such moves.  */
1712   if (class1 == DREGS && class2 != DREGS)
1713     return 2 * 2;
1714 
1715   return 2;
1716 }
1717 
1718 /* Return the cost of moving data of mode M between a
1719    register and memory.  A value of 2 is the default; this cost is
1720    relative to those in `REGISTER_MOVE_COST'.
1721 
1722    ??? In theory L1 memory has single-cycle latency.  We should add a switch
1723    that tells the compiler whether we expect to use only L1 memory for the
1724    program; it'll make the costs more accurate.  */
1725 
1726 int
bfin_memory_move_cost(enum machine_mode mode ATTRIBUTE_UNUSED,enum reg_class class,int in ATTRIBUTE_UNUSED)1727 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1728 		       enum reg_class class,
1729 		       int in ATTRIBUTE_UNUSED)
1730 {
1731   /* Make memory accesses slightly more expensive than any register-register
1732      move.  Also, penalize non-DP registers, since they need secondary
1733      reloads to load and store.  */
1734   if (! reg_class_subset_p (class, DPREGS))
1735     return 10;
1736 
1737   return 8;
1738 }
1739 
1740 /* Inform reload about cases where moving X with a mode MODE to a register in
1741    CLASS requires an extra scratch register.  Return the class needed for the
1742    scratch register.  */
1743 
enum reg_class
secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
			      rtx x)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through a SUBREG to classify the underlying object.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* A pseudo without a hard register lives in memory; treat it
	 like a MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      /* PREGS can compute the sum themselves; no scratch needed.  */
      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if (class == DREGS || class == DPREGS)
	return large_constant_p ? PREGS : NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      return PREGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;
  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
1813 
1814 /* Like secondary_input_reload_class; and all we do is call that function.  */
1815 
enum reg_class
secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
			       rtx x)
{
  /* Output reloads have exactly the same secondary-register needs as
     input reloads on this target.  */
  return secondary_input_reload_class (class, mode, x);
}
1822 
1823 /* Implement TARGET_HANDLE_OPTION.  */
1824 
static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      /* Diagnose an out-of-range library id, but still record that the
	 option was given; override_options uses bfin_lib_id_given to
	 check for a missing -mid-shared-library.  NOTE(review): the
	 flag is set even after the error — presumably so only one
	 diagnostic is issued; confirm this is intentional.  */
      if (value > MAX_LIBRARY_ID)
	error ("-mshared-library-id=%s is not between 0 and %d",
	       arg, MAX_LIBRARY_ID);
      bfin_lib_id_given = 1;
      return true;

    default:
      /* All other options are handled by the generic machinery.  */
      return true;
    }
}
1841 
1842 /* Implement the macro OVERRIDE_OPTIONS.  */
1843 
1844 void
override_options(void)1845 override_options (void)
1846 {
1847   if (TARGET_OMIT_LEAF_FRAME_POINTER)
1848     flag_omit_frame_pointer = 1;
1849 
1850   /* Library identification */
1851   if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
1852     error ("-mshared-library-id= specified without -mid-shared-library");
1853 
1854   if (TARGET_ID_SHARED_LIBRARY)
1855     /* ??? Provide a way to use a bigger GOT.  */
1856     flag_pic = 1;
1857 
1858   flag_schedule_insns = 0;
1859 }
1860 
1861 /* Return the destination address of BRANCH.
1862    We need to use this instead of get_attr_length, because the
1863    cbranch_with_nops pattern conservatively sets its length to 6, and
1864    we still prefer to use shorter sequences.  */
1865 
static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);

  /* cbranch_with_nops wraps the jump SET in a PARALLEL; look inside.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* Conditional branches are (if_then_else cond (label_ref ...) (pc));
     unconditional jumps are a bare (label_ref ...).  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
1881 
1882 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1883    it's a branch that's predicted taken.  */
1884 
1885 static int
cbranch_predicted_taken_p(rtx insn)1886 cbranch_predicted_taken_p (rtx insn)
1887 {
1888   rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1889 
1890   if (x)
1891     {
1892       int pred_val = INTVAL (XEXP (x, 0));
1893 
1894       return pred_val >= REG_BR_PROB_BASE / 2;
1895     }
1896 
1897   return 0;
1898 }
1899 
1900 /* Templates for use by asm_conditional_branch.  */
1901 
static const char *ccbranch_templates[][3] = {
  /* Rows are indexed by (bp << 1) | (BRF or BRT) and columns by branch
     length: 0 = short conditional jump, 1 = inverted jump around a
     jump.s, 2 = inverted jump around a jump.l — see
     asm_conditional_branch.  NOTE(review): BRF/BRT are defined outside
     this chunk; presumably BRF selects the "if !cc" row — confirm.  */
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",   "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
1908 
1909 /* Output INSN, which is a conditional branch instruction with operands
1910    OPERANDS.
1911 
1912    We deal with the various forms of conditional branches that can be generated
1913    by bfin_reorg to prevent the hardware from doing speculative loads, by
1914    - emitting a sufficient number of nops, if N_NOPS is nonzero, or
1915    - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
1916    Either of these is only necessary if the branch is short, otherwise the
1917    template we use ends in an unconditional jump which flushes the pipeline
1918    anyway.  */
1919 
void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  /* Byte distance from this branch to its destination.  */
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
            is to be taken from start of if cc rather than jump.
            Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Length code selecting a column of ccbranch_templates:
     0 = short branch, 1 = jump.s, 2 = jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Whether the emitted branch carries a "(bp)" predicted-taken
     annotation.  A forced prediction only applies to the short form;
     longer forms end in an unconditional jump anyway.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  /* Row of ccbranch_templates: prediction bit, then branch polarity.  */
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Callers must not request both nop padding and predicted-taken.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
1939 
1940 /* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
1941    stored in bfin_compare_op0 and bfin_compare_op1 already.  */
1942 
rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
	/* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	/* Directly supported: compute the condition into CC and test
	   CC != 0.  */
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Not directly supported: compute the reversed condition into CC
	   and test CC == 0 instead, which is equivalent.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  /* The returned expression (CODE2 applied against zero) is what the
     branch/scc expanders consume.  */
  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
1981 
1982 /* Return nonzero iff C has exactly one bit set if it is interpreted
1983    as a 32 bit constant.  */
1984 
1985 int
log2constp(unsigned HOST_WIDE_INT c)1986 log2constp (unsigned HOST_WIDE_INT c)
1987 {
1988   c &= 0xFFFFFFFF;
1989   return c != 0 && (c & (c-1)) == 0;
1990 }
1991 
1992 /* Returns the number of consecutive least significant zeros in the binary
1993    representation of *V.
1994    We modify *V to contain the original value arithmetically shifted right by
1995    the number of zeroes.  */
1996 
static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Isolate the sign bit and re-insert it after every shift, so that
     the unsigned shift below behaves like an arithmetic shift right.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  /* NOTE(review): the bound permits up to 33 iterations (n <= 32);
     presumably intentional for values spanning 32 bits, but confirm.  */
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
2016 
2017 /* After reload, split the load of an immediate constant.  OPERANDS are the
2018    operands of the movsi_insn pattern which we are splitting.  We return
2019    nonzero if we emitted a sequence to load the constant, zero if we emitted
2020    nothing because we want to use the splitter's default sequence.  */
2021 
2022 int
split_load_immediate(rtx operands[])2023 split_load_immediate (rtx operands[])
2024 {
2025   HOST_WIDE_INT val = INTVAL (operands[1]);
2026   HOST_WIDE_INT tmp;
2027   HOST_WIDE_INT shifted = val;
2028   HOST_WIDE_INT shifted_compl = ~val;
2029   int num_zero = shiftr_zero (&shifted);
2030   int num_compl_zero = shiftr_zero (&shifted_compl);
2031   unsigned int regno = REGNO (operands[0]);
2032   enum reg_class class1 = REGNO_REG_CLASS (regno);
2033 
2034   /* This case takes care of single-bit set/clear constants, which we could
2035      also implement with BITSET/BITCLR.  */
2036   if (num_zero
2037       && shifted >= -32768 && shifted < 65536
2038       && (D_REGNO_P (regno)
2039 	  || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2040     {
2041       emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2042       emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2043       return 1;
2044     }
2045 
2046   tmp = val & 0xFFFF;
2047   tmp |= -(tmp & 0x8000);
2048 
2049   /* If high word has one bit set or clear, try to use a bit operation.  */
2050   if (D_REGNO_P (regno))
2051     {
2052       if (log2constp (val & 0xFFFF0000))
2053 	{
2054 	  emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2055 	  emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2056 	  return 1;
2057 	}
2058       else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2059 	{
2060 	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2061 	  emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2062 	}
2063     }
2064 
2065   if (D_REGNO_P (regno))
2066     {
2067       if (CONST_7BIT_IMM_P (tmp))
2068 	{
2069 	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2070 	  emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2071 	  return 1;
2072 	}
2073 
2074       if ((val & 0xFFFF0000) == 0)
2075 	{
2076 	  emit_insn (gen_movsi (operands[0], const0_rtx));
2077 	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2078 	  return 1;
2079 	}
2080 
2081       if ((val & 0xFFFF0000) == 0xFFFF0000)
2082 	{
2083 	  emit_insn (gen_movsi (operands[0], constm1_rtx));
2084 	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2085 	  return 1;
2086 	}
2087     }
2088 
2089   /* Need DREGs for the remaining case.  */
2090   if (regno > REG_R7)
2091     return 0;
2092 
2093   if (optimize_size
2094       && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2095     {
2096       /* If optimizing for size, generate a sequence that has more instructions
2097 	 but is shorter.  */
2098       emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2099       emit_insn (gen_ashlsi3 (operands[0], operands[0],
2100 			      GEN_INT (num_compl_zero)));
2101       emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2102       return 1;
2103     }
2104   return 0;
2105 }
2106 
/* Return true if VALUE is a legal constant offset for a memory operand of
   mode MODE.  Return false if not.  */
2109 
2110 static bool
bfin_valid_add(enum machine_mode mode,HOST_WIDE_INT value)2111 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2112 {
2113   unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2114   int sz = GET_MODE_SIZE (mode);
2115   int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2116   /* The usual offsettable_memref machinery doesn't work so well for this
2117      port, so we deal with the problem here.  */
2118   unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2119   return (v & ~(mask << shift)) == 0;
2120 }
2121 
2122 static bool
bfin_valid_reg_p(unsigned int regno,int strict)2123 bfin_valid_reg_p (unsigned int regno, int strict)
2124 {
2125   return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2126 	  || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
2127 }
2128 
bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict))
      return true;
    break;
  case PLUS:
    /* Base register plus either an UNSPEC or a constant offset that fits
       the mode's offset range.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
	&& (GET_CODE (XEXP (x, 1)) == UNSPEC
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
      return true;
    /* NOTE(review): no break here -- a POST_INC/POST_DEC that fails the
       test above falls through into the PRE_DEC checks below.  The extra
       checks appear to be harmless for such an X, but confirm that this
       fallthrough is intentional.  */
  case PRE_DEC:
    /* Predecrement is only legitimate on the stack pointer.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2163 
/* Compute the cost of rtx X for the RTX_COSTS target hook.  OUTER_CODE is
   the code of the containing expression.  Returns true when *TOTAL has
   been fully determined, false to let generic code add operand costs.  */
static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* Constants are free when they fit the immediate field of the
	 operation that consumes them; otherwise they cost one insn.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and wide constants need a two-insn load.  */
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
	{
	  /* reg + reg * {2,4} matches a scaled-index address add.  */
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* Double-word arithmetic/shifts are expensive multi-insn
	 sequences.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      /* Double-word logical ops split into two single-word ops.  */
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    default:
      return false;
    }
}
2243 
2244 static void
bfin_internal_label(FILE * stream,const char * prefix,unsigned long num)2245 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2246 {
2247   fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2248 }
2249 
2250 /* Used for communication between {push,pop}_multiple_operation (which
2251    we use not only as a predicate) and the corresponding output functions.  */
2252 static int first_preg_to_save, first_dreg_to_save;
2253 
/* Recognize a multi-register push PARALLEL.  Returns nonzero on a match
   and, as a side effect, records the lowest D and P registers saved in
   first_dreg_to_save / first_preg_to_save (8 resp. 6 meaning "none").  */
int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* Walk the vector elements (skipping element 0 and the last element --
     presumably the SP adjustment and a clobber/use; not checked here).
     GROUP is 0 before any register store has been seen, 1 while matching
     consecutive D registers, 2 while matching consecutive P registers.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must store a register at offset -i*4 from SP.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First stored register decides which group we start in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* D registers may be followed by the start of the P group, or
	     must continue consecutively.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* Within the P group only consecutive registers are allowed.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
2323 
/* Recognize a multi-register pop PARALLEL, the mirror image of
   push_multiple_operation.  Returns nonzero on a match and records the
   lowest registers restored in first_dreg_to_save /
   first_preg_to_save.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* Pops appear in the opposite order: P registers counting down first
     (group 0), then D registers counting down from R7 (group 1).  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      /* The first load is from [SP] itself, the rest from
	 SP + (i-1)*4.  */
      if (i == 1)
	{
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the switch from the P group to the D group.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2382 
2383 /* Emit assembly code for one multi-register push described by INSN, with
2384    operands in OPERANDS.  */
2385 
2386 void
output_push_multiple(rtx insn,rtx * operands)2387 output_push_multiple (rtx insn, rtx *operands)
2388 {
2389   char buf[80];
2390   int ok;
2391 
2392   /* Validate the insn again, and compute first_[dp]reg_to_save. */
2393   ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2394   gcc_assert (ok);
2395 
2396   if (first_dreg_to_save == 8)
2397     sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2398   else if (first_preg_to_save == 6)
2399     sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2400   else
2401     sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2402 	     first_dreg_to_save, first_preg_to_save);
2403 
2404   output_asm_insn (buf, operands);
2405 }
2406 
2407 /* Emit assembly code for one multi-register pop described by INSN, with
2408    operands in OPERANDS.  */
2409 
2410 void
output_pop_multiple(rtx insn,rtx * operands)2411 output_pop_multiple (rtx insn, rtx *operands)
2412 {
2413   char buf[80];
2414   int ok;
2415 
2416   /* Validate the insn again, and compute first_[dp]reg_to_save. */
2417   ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2418   gcc_assert (ok);
2419 
2420   if (first_dreg_to_save == 8)
2421     sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2422   else if (first_preg_to_save == 6)
2423     sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2424   else
2425     sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2426 	     first_dreg_to_save, first_preg_to_save);
2427 
2428   output_asm_insn (buf, operands);
2429 }
2430 
2431 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */
2432 
2433 static void
single_move_for_movmem(rtx dst,rtx src,enum machine_mode mode,HOST_WIDE_INT offset)2434 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2435 {
2436   rtx scratch = gen_reg_rtx (mode);
2437   rtx srcmem, dstmem;
2438 
2439   srcmem = adjust_address_nv (src, mode, offset);
2440   dstmem = adjust_address_nv (dst, mode, offset);
2441   emit_move_insn (scratch, srcmem);
2442   emit_move_insn (dstmem, scratch);
2443 }
2444 
2445 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2446    alignment ALIGN_EXP.  Return true if successful, false if we should fall
2447    back on a different method.  */
2448 
bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  /* ALIGN/COUNT stay 0 unless known at compile time.  */
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers so the loop patterns below can
     post-increment them.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy words, then a trailing halfword/byte.  */
	  if ((count & ~3) == 4)
	    {
	      /* Exactly one word: a single move beats a loop.  */
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords, then a trailing byte.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
2534 
2535 
/* Adjust the scheduling cost of the dependence LINK between DEP_INSN and
   INSN; COST is the default cost.  Used for the sched_adjust_cost
   hook.  */
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      /* Penalize loading an address register from a data register (or
	 from memory) shortly before it is used.  NOTE(review): assumes
	 DEP_INSN's pattern is a plain SET -- presumably guaranteed by
	 its insn type; confirm.  */
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2567 
2568 /* We use the machine specific reorg pass for emitting CSYNC instructions
2569    after conditional branches as needed.
2570 
2571    The Blackfin is unusual in that a code sequence like
2572      if cc jump label
2573      r0 = (p0)
2574    may speculatively perform the load even if the condition isn't true.  This
2575    happens for a branch that is predicted not taken, because the pipeline
2576    isn't flushed or stalled, so the early stages of the following instructions,
2577    which perform the memory reference, are allowed to execute before the
2578    jump condition is evaluated.
2579    Therefore, we must insert additional instructions in all places where this
2580    could lead to incorrect behavior.  The manual recommends CSYNC, while
2581    VDSP seems to use NOPs (even though its corresponding compiler option is
2582    named CSYNC).
2583 
2584    When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2585    When optimizing for size, we turn the branch into a predicted taken one.
2586    This may be slower due to mispredicts, but saves code size.  */
2587 
static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Cycles since the last predicted-not-taken conditional branch;
     INT_MAX means "no such branch in range".  */
  int cycles_since_jump = INT_MAX;

  /* Nothing to do unless one of the anomaly workarounds is enabled.  */
  if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      /* Skip non-executable patterns and asms.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      /* Start counting cycles after this not-taken branch.  */
	      last_condjump = insn;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (INSN_P (insn))
	{
	  enum attr_type type = get_attr_type (insn);
	  int delay_needed = 0;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Loads that may fault are the speculative-load hazard; sync
	     insns are the CSYNC hazard, needing a bigger window.  */
	  if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
	    {
	      rtx pat = single_set (insn);
	      if (may_trap_p (SET_SRC (pat)))
		delay_needed = 3;
	    }
	  else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
	    delay_needed = 4;

	  if (delay_needed > cycles_since_jump)
	    {
	      rtx pat;
	      int num_clobbers;
	      rtx *op = recog_data.operand;

	      delay_needed -= cycles_since_jump;

	      /* Rewrite the offending branch in place, either as a
		 predicted-taken branch (smaller) or with explicit nop
		 padding (faster).  */
	      extract_insn (last_condjump);
	      if (optimize_size)
		{
		  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						     op[3]);
		  cycles_since_jump = INT_MAX;
		}
	      else
		/* Do not adjust cycles_since_jump in this case, so that
		   we'll increase the number of NOPs for a subsequent insn
		   if necessary.  */
		pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					     GEN_INT (delay_needed));
	      PATTERN (last_condjump) = pat;
	      INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
	    }
	}
    }
  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  cycles_since_jump = 0;
	  /* Scan the first few real insns after the branch target.  */
	  for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
	    {
	      rtx pat;

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (INSN_P (target))
		{
		  enum attr_type type = get_attr_type (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the insn falling through to the label is a
			 cbranch_with_nops from the first pass, the nops
			 we add here also cover its hazard, so reduce its
			 own nop count accordingly.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
2734 
2735 /* Handle interrupt_handler, exception_handler and nmi_handler function
2736    attributes; arguments as in struct attribute_spec.handler.  */
2737 
2738 static tree
handle_int_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2739 handle_int_attribute (tree *node, tree name,
2740 		      tree args ATTRIBUTE_UNUSED,
2741 		      int flags ATTRIBUTE_UNUSED,
2742 		      bool *no_add_attrs)
2743 {
2744   tree x = *node;
2745   if (TREE_CODE (x) == FUNCTION_DECL)
2746     x = TREE_TYPE (x);
2747 
2748   if (TREE_CODE (x) != FUNCTION_TYPE)
2749     {
2750       warning (OPT_Wattributes, "%qs attribute only applies to functions",
2751 	       IDENTIFIER_POINTER (name));
2752       *no_add_attrs = true;
2753     }
2754   else if (funkind (x) != SUBROUTINE)
2755     error ("multiple function type attributes specified");
2756 
2757   return NULL_TREE;
2758 }
2759 
2760 /* Return 0 if the attributes for two types are incompatible, 1 if they
2761    are compatible, and 2 if they are nearly compatible (which causes a
2762    warning to be generated).  */
2763 
2764 static int
bfin_comp_type_attributes(tree type1,tree type2)2765 bfin_comp_type_attributes (tree type1, tree type2)
2766 {
2767   e_funkind kind1, kind2;
2768 
2769   if (TREE_CODE (type1) != FUNCTION_TYPE)
2770     return 1;
2771 
2772   kind1 = funkind (type1);
2773   kind2 = funkind (type2);
2774 
2775   if (kind1 != kind2)
2776     return 0;
2777 
2778   /*  Check for mismatched modifiers */
2779   if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2780       != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2781     return 0;
2782 
2783   if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2784       != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2785     return 0;
2786 
2787   if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2788       != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2789     return 0;
2790 
2791   if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2792       != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2793     return 0;
2794 
2795   return 1;
2796 }
2797 
2798 /* Handle a "longcall" or "shortcall" attribute; arguments as in
2799    struct attribute_spec.handler.  */
2800 
2801 static tree
bfin_handle_longcall_attribute(tree * node,tree name,tree args ATTRIBUTE_UNUSED,int flags ATTRIBUTE_UNUSED,bool * no_add_attrs)2802 bfin_handle_longcall_attribute (tree *node, tree name,
2803 				tree args ATTRIBUTE_UNUSED,
2804 				int flags ATTRIBUTE_UNUSED,
2805 				bool *no_add_attrs)
2806 {
2807   if (TREE_CODE (*node) != FUNCTION_TYPE
2808       && TREE_CODE (*node) != FIELD_DECL
2809       && TREE_CODE (*node) != TYPE_DECL)
2810     {
2811       warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2812 	       IDENTIFIER_POINTER (name));
2813       *no_add_attrs = true;
2814     }
2815 
2816   if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2817        && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2818       || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2819 	  && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2820     {
2821       warning (OPT_Wattributes,
2822 	       "can't apply both longcall and shortcall attributes to the same function");
2823       *no_add_attrs = true;
2824     }
2825 
2826   return NULL_TREE;
2827 }
2828 
/* Table of valid machine attributes.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  /* Modifier attributes; no handler, validity checked where consumed.  */
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  /* Sentinel.  */
  { NULL, 0, 0, false, false, false, NULL }
};
2843 
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.

   The asm is emitted directly with output_asm_insn rather than built
   as RTL; operand substitution (%0, %1, %h0, %d0, %P0) is done on the
   XOPS array.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      /* The "reg += imm" form used below takes immediates in
	 [-64, 63]; values in [-128, -65] and [64, 126] are split
	 into two adds, and anything larger is materialized in R3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  /* Add -64 first; the remainder is in [-64, 0).  */
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  /* Add 63 first; the remainder is in (0, 63].  */
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Load the full constant into R3 half by half and add it.
	     NOTE(review): assumes R3 is free to clobber in a thunk
	     prologue — confirm against the ABI.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* Copy THIS (R0) to P2 and load the vtable pointer, *this.  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* VCALL_OFFSET does not fit a load displacement: add it to
	     P2 explicitly (via P1) and fall back to a plain [P2]
	     load.  Note this also redirects xops[1] to P1 for the
	     final load below.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      /* Load the vtable slot and add it to THIS.  */
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes this condition always true, so the
     (absent) PIC indirect-jump alternative is never needed; presumably
     a direct jump.l is always valid here — confirm for -fpic.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
2913 
/* Codes for all the Blackfin builtins.  These are the
   DECL_FUNCTION_CODE values assigned by def_builtin and dispatched on
   in bfin_expand_builtin.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,	/* Expands to the csync insn pattern.  */
  BFIN_BUILTIN_SSYNC,	/* Expands to the ssync insn pattern.  */
  BFIN_BUILTIN_MAX	/* Sentinel: number of builtins, not a code.  */
};
2921 
/* Register builtin NAME with function type TYPE and machine-specific
   function code CODE.  BUILT_IN_MD marks it as a target builtin; the
   NULL/NULL_TREE arguments mean no library fallback name and no
   attributes.  Wrapped in do { } while (0) so it behaves as a single
   statement.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			       NULL, NULL_TREE);			\
} while (0)
2927 
/* Set up all builtin functions for this target.  Called via
   TARGET_INIT_BUILTINS.  */
static void
bfin_init_builtins (void)
{
  /* Shared "void f(void)" type for both synchronization builtins.  */
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);

  /* Register the core/system synchronization builtins; both take no
     arguments and return nothing.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
}
2939 
2940 /* Expand an expression EXP that calls a built-in function,
2941    with result going to TARGET if that's convenient
2942    (and in mode MODE if that's convenient).
2943    SUBTARGET may be used as the target for computing one of EXP's operands.
2944    IGNORE is nonzero if the value is to be ignored.  */
2945 
2946 static rtx
bfin_expand_builtin(tree exp,rtx target ATTRIBUTE_UNUSED,rtx subtarget ATTRIBUTE_UNUSED,enum machine_mode mode ATTRIBUTE_UNUSED,int ignore ATTRIBUTE_UNUSED)2947 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2948 		     rtx subtarget ATTRIBUTE_UNUSED,
2949 		     enum machine_mode mode ATTRIBUTE_UNUSED,
2950 		     int ignore ATTRIBUTE_UNUSED)
2951 {
2952   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2953   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2954 
2955   switch (fcode)
2956     {
2957     case BFIN_BUILTIN_CSYNC:
2958       emit_insn (gen_csync ());
2959       return 0;
2960     case BFIN_BUILTIN_SSYNC:
2961       emit_insn (gen_ssync ());
2962       return 0;
2963 
2964     default:
2965       gcc_unreachable ();
2966     }
2967 }
2968 
/* Initialize the GCC target structure: override the default hooks
   with the Blackfin-specific implementations defined in this file,
   then instantiate targetm.  */

/* Builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

/* Assembly output.  */
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

/* Machine attributes.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Costs.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

/* Calls and thunks.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
/* bfin_output_mi_thunk handles every delta/vcall_offset combination,
   so advertise unconditional support.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

/* Scheduling.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

/* Argument passing.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

/* Miscellaneous.  */
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

struct gcc_target targetm = TARGET_INITIALIZER;
3042