1 /* Subroutines used for code generation on the Tilera TILEPro.
2    Copyright (C) 2011-2016 Free Software Foundation, Inc.
3    Contributed by Walter Lee (walt@tilera.com)
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published
9    by the Free Software Foundation; either version 3, or (at your
10    option) any later version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT
13    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15    License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "df.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "diagnostic.h"
38 #include "output.h"
39 #include "insn-attr.h"
40 #include "alias.h"
41 #include "explow.h"
42 #include "calls.h"
43 #include "varasm.h"
44 #include "expr.h"
45 #include "langhooks.h"
46 #include "cfgrtl.h"
47 #include "tm-constrs.h"
48 #include "dwarf2.h"
49 #include "fold-const.h"
50 #include "stor-layout.h"
51 #include "gimplify.h"
52 #include "tilepro-builtins.h"
53 #include "tilepro-multiply.h"
54 #include "builtins.h"
55 
56 /* This file should be included last.  */
57 #include "target-def.h"
58 
59 /* SYMBOL_REF for GOT */
60 static GTY(()) rtx g_got_symbol = NULL;
61 
62 /* Report whether we're printing out the first address fragment of a
63    POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
64    TARGET_PRINT_OPERAND_ADDRESS.  */
65 static bool output_memory_autoinc_first;
66 
67 
68 
69 /* Option handling  */
70 
71 /* Implement TARGET_OPTION_OVERRIDE.  */
72 static void
tilepro_option_override(void)73 tilepro_option_override (void)
74 {
75   /* When modulo scheduling is enabled, we still rely on regular
76      scheduler for bundling.  */
77   if (flag_modulo_sched)
78     flag_resched_modulo_sched = 1;
79 }
80 
81 
82 
83 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
84 static bool
tilepro_scalar_mode_supported_p(machine_mode mode)85 tilepro_scalar_mode_supported_p (machine_mode mode)
86 {
87   switch (mode)
88     {
89     case QImode:
90     case HImode:
91     case SImode:
92     case DImode:
93       return true;
94 
95     case SFmode:
96     case DFmode:
97       return true;
98 
99     default:
100       return false;
101     }
102 }
103 
104 
105 /* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
106 static bool
tile_vector_mode_supported_p(machine_mode mode)107 tile_vector_mode_supported_p (machine_mode mode)
108 {
109   return mode == V4QImode || mode == V2HImode;
110 }
111 
112 
113 /* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
114 static bool
tilepro_cannot_force_const_mem(machine_mode mode ATTRIBUTE_UNUSED,rtx x ATTRIBUTE_UNUSED)115 tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
116 				rtx x ATTRIBUTE_UNUSED)
117 {
118   return true;
119 }
120 
121 
122 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
123 static bool
tilepro_function_ok_for_sibcall(tree decl,tree exp ATTRIBUTE_UNUSED)124 tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
125 {
126   return decl != NULL;
127 }
128 
129 
130 /* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
131    passed by reference.  */
132 static bool
tilepro_pass_by_reference(cumulative_args_t cum ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED,const_tree type,bool named ATTRIBUTE_UNUSED)133 tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
134 			   machine_mode mode ATTRIBUTE_UNUSED,
135 			   const_tree type, bool named ATTRIBUTE_UNUSED)
136 {
137   return (type && TYPE_SIZE (type)
138 	  && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
139 }
140 
141 
142 /* Implement TARGET_RETURN_IN_MEMORY.  */
143 static bool
tilepro_return_in_memory(const_tree type,const_tree fndecl ATTRIBUTE_UNUSED)144 tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
145 {
146   return !IN_RANGE (int_size_in_bytes (type),
147 		    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
148 }
149 
150 
151 /* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
152 static unsigned int
tilepro_function_arg_boundary(machine_mode mode,const_tree type)153 tilepro_function_arg_boundary (machine_mode mode, const_tree type)
154 {
155   unsigned int alignment;
156 
157   alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
158   if (alignment < PARM_BOUNDARY)
159     alignment = PARM_BOUNDARY;
160   if (alignment > STACK_BOUNDARY)
161     alignment = STACK_BOUNDARY;
162   return alignment;
163 }
164 
165 
166 /* Implement TARGET_FUNCTION_ARG.  */
167 static rtx
tilepro_function_arg(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)168 tilepro_function_arg (cumulative_args_t cum_v,
169 		      machine_mode mode,
170 		      const_tree type, bool named ATTRIBUTE_UNUSED)
171 {
172   CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
173   int byte_size = ((mode == BLKmode)
174 		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
175   bool doubleword_aligned_p;
176 
177   if (cum >= TILEPRO_NUM_ARG_REGS)
178     return NULL_RTX;
179 
180   /* See whether the argument has doubleword alignment.  */
181   doubleword_aligned_p =
182     tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;
183 
184   if (doubleword_aligned_p)
185     cum += cum & 1;
186 
187   /* The ABI does not allow parameters to be passed partially in reg
188      and partially in stack.  */
189   if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
190       > TILEPRO_NUM_ARG_REGS)
191     return NULL_RTX;
192 
193   return gen_rtx_REG (mode, cum);
194 }
195 
196 
197 /* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
198 static void
tilepro_function_arg_advance(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)199 tilepro_function_arg_advance (cumulative_args_t cum_v,
200 			      machine_mode mode,
201 			      const_tree type, bool named ATTRIBUTE_UNUSED)
202 {
203   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
204 
205   int byte_size = ((mode == BLKmode)
206 		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
207   int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
208   bool doubleword_aligned_p;
209 
210   /* See whether the argument has doubleword alignment.  */
211   doubleword_aligned_p =
212     tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;
213 
214   if (doubleword_aligned_p)
215     *cum += *cum & 1;
216 
217   /* If the current argument does not fit in the pretend_args space,
218      skip over it.  */
219   if (*cum < TILEPRO_NUM_ARG_REGS
220       && *cum + word_size > TILEPRO_NUM_ARG_REGS)
221     *cum = TILEPRO_NUM_ARG_REGS;
222 
223   *cum += word_size;
224 }
225 
226 
227 /* Implement TARGET_FUNCTION_VALUE.  */
228 static rtx
tilepro_function_value(const_tree valtype,const_tree fn_decl_or_type,bool outgoing ATTRIBUTE_UNUSED)229 tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
230 			bool outgoing ATTRIBUTE_UNUSED)
231 {
232   machine_mode mode;
233   int unsigned_p;
234 
235   mode = TYPE_MODE (valtype);
236   unsigned_p = TYPE_UNSIGNED (valtype);
237 
238   mode = promote_function_mode (valtype, mode, &unsigned_p,
239 				fn_decl_or_type, 1);
240 
241   return gen_rtx_REG (mode, 0);
242 }
243 
244 
245 /* Implement TARGET_LIBCALL_VALUE.  */
246 static rtx
tilepro_libcall_value(machine_mode mode,const_rtx fun ATTRIBUTE_UNUSED)247 tilepro_libcall_value (machine_mode mode,
248 		       const_rtx fun ATTRIBUTE_UNUSED)
249 {
250   return gen_rtx_REG (mode, 0);
251 }
252 
253 
254 /* Implement FUNCTION_VALUE_REGNO_P.  */
255 static bool
tilepro_function_value_regno_p(const unsigned int regno)256 tilepro_function_value_regno_p (const unsigned int regno)
257 {
258   return regno < TILEPRO_NUM_RETURN_REGS;
259 }
260 
261 
262 /* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
263 static tree
tilepro_build_builtin_va_list(void)264 tilepro_build_builtin_va_list (void)
265 {
266   tree f_args, f_skip, record, type_decl;
267   bool owp;
268 
269   record = lang_hooks.types.make_type (RECORD_TYPE);
270 
271   type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
272 			  get_identifier ("__va_list_tag"), record);
273 
274   f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
275 		       get_identifier ("__args"), ptr_type_node);
276   f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
277 		       get_identifier ("__skip"), ptr_type_node);
278 
279   DECL_FIELD_CONTEXT (f_args) = record;
280 
281   DECL_FIELD_CONTEXT (f_skip) = record;
282 
283   TREE_CHAIN (record) = type_decl;
284   TYPE_NAME (record) = type_decl;
285   TYPE_FIELDS (record) = f_args;
286   TREE_CHAIN (f_args) = f_skip;
287 
288   /* We know this is being padded and we want it too.  It is an
289      internal type so hide the warnings from the user.  */
290   owp = warn_padded;
291   warn_padded = false;
292 
293   layout_type (record);
294 
295   warn_padded = owp;
296 
297   /* The correct type is an array type of one element.  */
298   return record;
299 }
300 
301 
302 /* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
303 static void
tilepro_va_start(tree valist,rtx nextarg ATTRIBUTE_UNUSED)304 tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
305 {
306   tree f_args, f_skip;
307   tree args, skip, t;
308 
309   f_args = TYPE_FIELDS (TREE_TYPE (valist));
310   f_skip = TREE_CHAIN (f_args);
311 
312   args =
313     build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
314   skip =
315     build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);
316 
317   /* Find the __args area.  */
318   t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
319   t = fold_build_pointer_plus_hwi (t,
320 				   UNITS_PER_WORD *
321 				   (crtl->args.info - TILEPRO_NUM_ARG_REGS));
322 
323   if (crtl->args.pretend_args_size > 0)
324     t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
325 
326   t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
327   TREE_SIDE_EFFECTS (t) = 1;
328   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
329 
330   /* Find the __skip area.  */
331   t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
332   t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
333   t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
334   TREE_SIDE_EFFECTS (t) = 1;
335   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
336 }
337 
338 
339 /* Implement TARGET_SETUP_INCOMING_VARARGS.  */
340 static void
tilepro_setup_incoming_varargs(cumulative_args_t cum,machine_mode mode,tree type,int * pretend_args,int no_rtl)341 tilepro_setup_incoming_varargs (cumulative_args_t cum,
342 				machine_mode mode,
343 				tree type, int *pretend_args, int no_rtl)
344 {
345   CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
346   int first_reg;
347 
348   /* The caller has advanced CUM up to, but not beyond, the last named
349      argument.  Advance a local copy of CUM past the last "real" named
350      argument, to find out how many registers are left over.  */
351   targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
352 				      mode, type, true);
353   first_reg = local_cum;
354 
355   if (local_cum < TILEPRO_NUM_ARG_REGS)
356     {
357       *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);
358 
359       if (!no_rtl)
360 	{
361 	  alias_set_type set = get_varargs_alias_set ();
362 	  rtx tmp =
363 	    gen_rtx_MEM (BLKmode, plus_constant (Pmode, \
364 						 virtual_incoming_args_rtx,
365 						 -STACK_POINTER_OFFSET -
366 						 UNITS_PER_WORD *
367 						 (TILEPRO_NUM_ARG_REGS -
368 						  first_reg)));
369 	  MEM_NOTRAP_P (tmp) = 1;
370 	  set_mem_alias_set (tmp, set);
371 	  move_block_from_reg (first_reg, tmp,
372 			       TILEPRO_NUM_ARG_REGS - first_reg);
373 	}
374     }
375   else
376     *pretend_args = 0;
377 }
378 
379 
380 /* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
381    the va_list structure VALIST as required to retrieve an argument of
382    type TYPE, and returning that argument.
383 
384    ret = va_arg(VALIST, TYPE);
385 
386    generates code equivalent to:
387 
388     paddedsize = (sizeof(TYPE) + 3) & -4;
389     if ((VALIST.__args + paddedsize > VALIST.__skip)
390 	& (VALIST.__args <= VALIST.__skip))
391       addr = VALIST.__skip + STACK_POINTER_OFFSET;
392     else
393       addr = VALIST.__args;
394     VALIST.__args = addr + paddedsize;
395     ret = *(TYPE *)addr;                                          */
396 static tree
tilepro_gimplify_va_arg_expr(tree valist,tree type,gimple_seq * pre_p,gimple_seq * post_p ATTRIBUTE_UNUSED)397 tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
398 			      gimple_seq * post_p ATTRIBUTE_UNUSED)
399 {
400   tree f_args, f_skip;
401   tree args, skip;
402   HOST_WIDE_INT size, rsize;
403   tree addr, tmp;
404   bool pass_by_reference_p;
405 
406   f_args = TYPE_FIELDS (va_list_type_node);
407   f_skip = TREE_CHAIN (f_args);
408 
409   args =
410     build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
411   skip =
412     build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);
413 
414   addr = create_tmp_var (ptr_type_node, "va_arg");
415 
416   /* if an object is dynamically sized, a pointer to it is passed
417      instead of the object itself.  */
418   pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
419 					   false);
420 
421   if (pass_by_reference_p)
422     type = build_pointer_type (type);
423 
424   size = int_size_in_bytes (type);
425   rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;
426 
427   /* If the alignment of the type is greater than the default for a
428      parameter, align to STACK_BOUNDARY.  */
429   if (TYPE_ALIGN (type) > PARM_BOUNDARY)
430     {
431       /* Assert the only case we generate code for: when
432          stack boundary = 2 * parm boundary.  */
433       gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);
434 
435       tmp = build2 (BIT_AND_EXPR, sizetype,
436 		    fold_convert (sizetype, unshare_expr (args)),
437 		    size_int (PARM_BOUNDARY / 8));
438       tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
439 		    unshare_expr (args), tmp);
440 
441       gimplify_assign (unshare_expr (args), tmp, pre_p);
442     }
443 
444   /* Build conditional expression to calculate addr. The expression
445      will be gimplified later.  */
446   tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
447   tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
448 		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
449 		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
450 			unshare_expr (skip)));
451 
452   tmp = build3 (COND_EXPR, ptr_type_node, tmp,
453 		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
454 			size_int (STACK_POINTER_OFFSET)),
455 		unshare_expr (args));
456 
457   gimplify_assign (addr, tmp, pre_p);
458 
459   /* Update VALIST.__args.  */
460   tmp = fold_build_pointer_plus_hwi (addr, rsize);
461   gimplify_assign (unshare_expr (args), tmp, pre_p);
462 
463   addr = fold_convert (build_pointer_type (type), addr);
464 
465   if (pass_by_reference_p)
466     addr = build_va_arg_indirect_ref (addr);
467 
468   return build_va_arg_indirect_ref (addr);
469 }
470 
471 
472 
473 /* Implement TARGET_RTX_COSTS.  */
474 static bool
tilepro_rtx_costs(rtx x,machine_mode mode,int outer_code,int opno,int * total,bool speed)475 tilepro_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
476 		   int *total, bool speed)
477 {
478   int code = GET_CODE (x);
479 
480   switch (code)
481     {
482     case CONST_INT:
483       /* If this is an 8-bit constant, return zero since it can be
484          used nearly anywhere with no cost.  If it is a valid operand
485          for an ADD or AND, likewise return 0 if we know it will be
486          used in that context.  Otherwise, return 2 since it might be
487          used there later.  All other constants take at least two
488          insns.  */
489       if (satisfies_constraint_I (x))
490 	{
491 	  *total = 0;
492 	  return true;
493 	}
494       else if (outer_code == PLUS && add_operand (x, VOIDmode))
495 	{
496 	  /* Slightly penalize large constants even though we can add
497 	     them in one instruction, because it forces the use of
498 	     2-wide bundling mode.  */
499 	  *total = 1;
500 	  return true;
501 	}
502       else if (move_operand (x, SImode))
503 	{
504 	  /* We can materialize in one move.  */
505 	  *total = COSTS_N_INSNS (1);
506 	  return true;
507 	}
508       else
509 	{
510 	  /* We can materialize in two moves.  */
511 	  *total = COSTS_N_INSNS (2);
512 	  return true;
513 	}
514 
515       return false;
516 
517     case CONST:
518     case LABEL_REF:
519     case SYMBOL_REF:
520       *total = COSTS_N_INSNS (2);
521       return true;
522 
523     case CONST_DOUBLE:
524       *total = COSTS_N_INSNS (4);
525       return true;
526 
527     case HIGH:
528       *total = 0;
529       return true;
530 
531     case MEM:
532       /* If outer-code was a sign or zero extension, a cost of
533          COSTS_N_INSNS (1) was already added in, so account for
534          that.  */
535       if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
536 	*total = COSTS_N_INSNS (1);
537       else
538 	*total = COSTS_N_INSNS (2);
539       return true;
540 
541     case PLUS:
542       /* Convey that s[123]a are efficient.  */
543       if (GET_CODE (XEXP (x, 0)) == MULT
544 	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
545 	{
546 	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0), mode,
547 			      (enum rtx_code) outer_code, opno, speed)
548 		    + rtx_cost (XEXP (x, 1), mode,
549 				(enum rtx_code) outer_code, opno, speed)
550 		    + COSTS_N_INSNS (1));
551 	  return true;
552 	}
553       return false;
554 
555     case MULT:
556       *total = COSTS_N_INSNS (2);
557       return false;
558 
559     case SIGN_EXTEND:
560     case ZERO_EXTEND:
561       if (outer_code == MULT)
562 	*total = 0;
563       else
564 	*total = COSTS_N_INSNS (1);
565       return false;
566 
567     case DIV:
568     case UDIV:
569     case MOD:
570     case UMOD:
571       /* These are handled by software and are very expensive.  */
572       *total = COSTS_N_INSNS (100);
573       return false;
574 
575     case UNSPEC:
576     case UNSPEC_VOLATILE:
577       {
578 	int num = XINT (x, 1);
579 
580 	if (num <= TILEPRO_LAST_LATENCY_1_INSN)
581 	  *total = COSTS_N_INSNS (1);
582 	else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
583 	  *total = COSTS_N_INSNS (2);
584 	else if (num > TILEPRO_LAST_LATENCY_INSN)
585 	  {
586 	    if (outer_code == PLUS)
587 	      *total = 0;
588 	    else
589 	      *total = COSTS_N_INSNS (1);
590 	  }
591 	else
592 	  {
593 	    switch (num)
594 	      {
595 	      case UNSPEC_BLOCKAGE:
596 	      case UNSPEC_NETWORK_BARRIER:
597 		*total = 0;
598 		break;
599 
600 	      case UNSPEC_LNK_AND_LABEL:
601 	      case UNSPEC_MF:
602 	      case UNSPEC_NETWORK_RECEIVE:
603 	      case UNSPEC_NETWORK_SEND:
604 	      case UNSPEC_TLS_GD_ADD:
605 		*total = COSTS_N_INSNS (1);
606 		break;
607 
608 	      case UNSPEC_TLS_IE_LOAD:
609 		*total = COSTS_N_INSNS (2);
610 		break;
611 
612 	      case UNSPEC_SP_SET:
613 		*total = COSTS_N_INSNS (3);
614 		break;
615 
616 	      case UNSPEC_SP_TEST:
617 		*total = COSTS_N_INSNS (4);
618 		break;
619 
620 	      case UNSPEC_LATENCY_L2:
621 		*total = COSTS_N_INSNS (8);
622 		break;
623 
624 	      case UNSPEC_TLS_GD_CALL:
625 		*total = COSTS_N_INSNS (30);
626 		break;
627 
628 	      case UNSPEC_LATENCY_MISS:
629 		*total = COSTS_N_INSNS (80);
630 		break;
631 
632 	      default:
633 		*total = COSTS_N_INSNS (1);
634 	      }
635 	  }
636 	return true;
637       }
638 
639     default:
640       return false;
641     }
642 }
643 
644 
645 
646 /* Returns an SImode integer rtx with value VAL.  */
647 static rtx
gen_int_si(HOST_WIDE_INT val)648 gen_int_si (HOST_WIDE_INT val)
649 {
650   return gen_int_mode (val, SImode);
651 }
652 
653 
654 /* Create a temporary variable to hold a partial result, to enable
655    CSE.  */
656 static rtx
create_temp_reg_if_possible(machine_mode mode,rtx default_reg)657 create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
658 {
659   return can_create_pseudo_p ()? gen_reg_rtx (mode) : default_reg;
660 }
661 
662 
663 /* Functions to save and restore machine-specific function data.  */
664 static struct machine_function *
tilepro_init_machine_status(void)665 tilepro_init_machine_status (void)
666 {
667   return ggc_cleared_alloc<machine_function> ();
668 }
669 
670 
671 /* Do anything needed before RTL is emitted for each function.  */
672 void
tilepro_init_expanders(void)673 tilepro_init_expanders (void)
674 {
675   /* Arrange to initialize and mark the machine per-function
676      status.  */
677   init_machine_status = tilepro_init_machine_status;
678 
679   if (cfun && cfun->machine && flag_pic)
680     {
681       static int label_num = 0;
682 
683       char text_label_name[32];
684 
685       struct machine_function *machine = cfun->machine;
686 
687       ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);
688 
689       machine->text_label_symbol =
690 	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));
691 
692       machine->text_label_rtx =
693 	gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);
694 
695       machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
696 
697       machine->calls_tls_get_addr = false;
698     }
699 }
700 
701 
702 /* Return true if X contains a thread-local symbol.  */
703 static bool
tilepro_tls_referenced_p(rtx x)704 tilepro_tls_referenced_p (rtx x)
705 {
706   if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
707     x = XEXP (XEXP (x, 0), 0);
708 
709   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
710     return true;
711 
712   /* That's all we handle in tilepro_legitimize_tls_address for
713      now.  */
714   return false;
715 }
716 
717 
718 /* Return true if X requires a scratch register.  It is given that
719    flag_pic is on and that X satisfies CONSTANT_P.  */
720 static int
tilepro_pic_address_needs_scratch(rtx x)721 tilepro_pic_address_needs_scratch (rtx x)
722 {
723   if (GET_CODE (x) == CONST
724       && GET_CODE (XEXP (x, 0)) == PLUS
725       && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
726 	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
727       && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
728     return true;
729 
730   return false;
731 }
732 
733 
734 /* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
735    which we are willing to load the value into a register via a move
736    pattern.  TLS cannot be treated as a constant because it can
737    include a function call.  */
738 static bool
tilepro_legitimate_constant_p(machine_mode mode ATTRIBUTE_UNUSED,rtx x)739 tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
740 {
741   switch (GET_CODE (x))
742     {
743     case CONST:
744     case SYMBOL_REF:
745       return !tilepro_tls_referenced_p (x);
746 
747     default:
748       return true;
749     }
750 }
751 
752 
753 /* Return true if the constant value X is a legitimate general operand
754    when generating PIC code.  It is given that flag_pic is on and that
755    X satisfies CONSTANT_P.  */
756 bool
tilepro_legitimate_pic_operand_p(rtx x)757 tilepro_legitimate_pic_operand_p (rtx x)
758 {
759   if (tilepro_pic_address_needs_scratch (x))
760     return false;
761 
762   if (tilepro_tls_referenced_p (x))
763     return false;
764 
765   return true;
766 }
767 
768 
769 /* Return true if the rtx X can be used as an address operand.  */
770 static bool
tilepro_legitimate_address_p(machine_mode ARG_UNUSED (mode),rtx x,bool strict)771 tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,
772 			      bool strict)
773 {
774   if (GET_CODE (x) == SUBREG)
775     x = SUBREG_REG (x);
776 
777   switch (GET_CODE (x))
778     {
779     case POST_INC:
780     case POST_DEC:
781       if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
782 	return false;
783 
784       x = XEXP (x, 0);
785       break;
786 
787     case POST_MODIFY:
788       if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
789 	return false;
790 
791       if (GET_CODE (XEXP (x, 1)) != PLUS)
792 	return false;
793 
794       if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
795 	return false;
796 
797       if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
798 	return false;
799 
800       x = XEXP (x, 0);
801       break;
802 
803     case REG:
804       break;
805 
806     default:
807       return false;
808     }
809 
810   /* Check if x is a valid reg.  */
811   if (!REG_P (x))
812     return false;
813 
814   if (strict)
815     return REGNO_OK_FOR_BASE_P (REGNO (x));
816   else
817     return true;
818 }
819 
820 
821 /* Return the rtx containing SYMBOL_REF to the text label.  */
822 static rtx
tilepro_text_label_symbol(void)823 tilepro_text_label_symbol (void)
824 {
825   return cfun->machine->text_label_symbol;
826 }
827 
828 
829 /* Return the register storing the value of the text label.  */
830 static rtx
tilepro_text_label_rtx(void)831 tilepro_text_label_rtx (void)
832 {
833   return cfun->machine->text_label_rtx;
834 }
835 
836 
837 /* Return the register storing the value of the global offset
838    table.  */
839 static rtx
tilepro_got_rtx(void)840 tilepro_got_rtx (void)
841 {
842   return cfun->machine->got_rtx;
843 }
844 
845 
846 /* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
847 static rtx
tilepro_got_symbol(void)848 tilepro_got_symbol (void)
849 {
850   if (g_got_symbol == NULL)
851     g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
852 
853   return g_got_symbol;
854 }
855 
856 
857 /* Return a reference to the got to be used by tls references.  */
858 static rtx
tilepro_tls_got(void)859 tilepro_tls_got (void)
860 {
861   rtx temp;
862   if (flag_pic)
863     {
864       crtl->uses_pic_offset_table = 1;
865       return tilepro_got_rtx ();
866     }
867 
868   temp = gen_reg_rtx (Pmode);
869   emit_move_insn (temp, tilepro_got_symbol ());
870 
871   return temp;
872 }
873 
874 
875 /* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
876    this (thread-local) address.  */
877 static rtx
tilepro_legitimize_tls_address(rtx addr)878 tilepro_legitimize_tls_address (rtx addr)
879 {
880   rtx ret;
881 
882   gcc_assert (can_create_pseudo_p ());
883 
884   if (GET_CODE (addr) == SYMBOL_REF)
885     switch (SYMBOL_REF_TLS_MODEL (addr))
886       {
887       case TLS_MODEL_GLOBAL_DYNAMIC:
888       case TLS_MODEL_LOCAL_DYNAMIC:
889 	{
890 	  rtx r0, temp1, temp2, temp3, got;
891 	  rtx_insn *last;
892 
893 	  ret = gen_reg_rtx (Pmode);
894 	  r0 = gen_rtx_REG (Pmode, 0);
895 	  temp1 = gen_reg_rtx (Pmode);
896 	  temp2 = gen_reg_rtx (Pmode);
897 	  temp3 = gen_reg_rtx (Pmode);
898 
899 	  got = tilepro_tls_got ();
900 	  emit_insn (gen_tls_gd_addhi (temp1, got, addr));
901 	  emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
902 	  emit_move_insn (r0, temp2);
903 	  emit_insn (gen_tls_gd_call (addr));
904 	  emit_move_insn (temp3, r0);
905 	  last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
906 	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
907 	  break;
908 	}
909       case TLS_MODEL_INITIAL_EXEC:
910 	{
911 	  rtx temp1, temp2, temp3, got;
912 	  rtx_insn *last;
913 
914 	  ret = gen_reg_rtx (Pmode);
915 	  temp1 = gen_reg_rtx (Pmode);
916 	  temp2 = gen_reg_rtx (Pmode);
917 	  temp3 = gen_reg_rtx (Pmode);
918 
919 	  got = tilepro_tls_got ();
920 	  emit_insn (gen_tls_ie_addhi (temp1, got, addr));
921 	  emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
922 	  emit_insn (gen_tls_ie_load (temp3, temp2, addr));
923 	  last =
924 	    emit_move_insn(ret,
925 			   gen_rtx_PLUS (Pmode,
926 					 gen_rtx_REG (Pmode,
927 						      THREAD_POINTER_REGNUM),
928 					 temp3));
929 	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
930 	  break;
931 	}
932       case TLS_MODEL_LOCAL_EXEC:
933 	{
934 	  rtx temp1;
935 	  rtx_insn *last;
936 
937 	  ret = gen_reg_rtx (Pmode);
938 	  temp1 = gen_reg_rtx (Pmode);
939 
940 	  emit_insn (gen_tls_le_addhi (temp1,
941 				       gen_rtx_REG (Pmode,
942 						    THREAD_POINTER_REGNUM),
943 				       addr));
944 	  last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
945 	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
946 	  break;
947 	}
948       default:
949 	gcc_unreachable ();
950       }
951   else if (GET_CODE (addr) == CONST)
952     {
953       rtx base, offset;
954 
955       gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);
956 
957       base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
958       offset = XEXP (XEXP (addr, 0), 1);
959 
960       base = force_operand (base, NULL_RTX);
961       ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
962     }
963   else
964     gcc_unreachable ();
965 
966   return ret;
967 }
968 
969 
970 /* Legitimize PIC addresses.  If the address is already
971    position-independent, we return ORIG.  Newly generated
972    position-independent addresses go into a reg.  This is REG if
973    nonzero, otherwise we allocate register(s) as necessary.  */
974 static rtx
tilepro_legitimize_pic_address(rtx orig,machine_mode mode ATTRIBUTE_UNUSED,rtx reg)975 tilepro_legitimize_pic_address (rtx orig,
976 				machine_mode mode ATTRIBUTE_UNUSED,
977 				rtx reg)
978 {
979   if (GET_CODE (orig) == SYMBOL_REF)
980     {
981       rtx address, pic_ref;
982 
983       if (reg == 0)
984 	{
985 	  gcc_assert (can_create_pseudo_p ());
986 	  reg = gen_reg_rtx (Pmode);
987 	}
988 
989       if (SYMBOL_REF_LOCAL_P (orig))
990 	{
991 	  /* If not during reload, allocate another temp reg here for
992 	     loading in the address, so that these instructions can be
993 	     optimized properly.  */
994 	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
995 	  rtx text_label_symbol = tilepro_text_label_symbol ();
996 	  rtx text_label_rtx = tilepro_text_label_rtx ();
997 
998 	  emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
999 				      text_label_symbol));
1000 	  emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
1001 				     text_label_symbol));
1002 
1003 	  /* Note: this is conservative.  We use the text_label but we
1004 	     don't use the pic_offset_table.  However, in some cases
1005 	     we may need the pic_offset_table (see
1006 	     tilepro_fixup_pcrel_references).  */
1007 	  crtl->uses_pic_offset_table = 1;
1008 
1009 	  address = temp_reg;
1010 
1011 	  emit_move_insn (reg, address);
1012 	  return reg;
1013 	}
1014       else
1015 	{
1016 	  /* If not during reload, allocate another temp reg here for
1017 	     loading in the address, so that these instructions can be
1018 	     optimized properly.  */
1019 	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
1020 
1021 	  gcc_assert (flag_pic);
1022 	  if (flag_pic == 1)
1023 	    {
1024 	      emit_insn (gen_add_got16 (temp_reg,
1025 					tilepro_got_rtx (), orig));
1026 	    }
1027 	  else
1028 	    {
1029 	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
1030 	      emit_insn (gen_addhi_got32 (temp_reg2,
1031 					  tilepro_got_rtx (), orig));
1032 	      emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));
1033 	    }
1034 
1035 	  address = temp_reg;
1036 
1037 	  pic_ref = gen_const_mem (Pmode, address);
1038 	  crtl->uses_pic_offset_table = 1;
1039 	  emit_move_insn (reg, pic_ref);
1040 	  /* The following put a REG_EQUAL note on this insn, so that
1041 	     it can be optimized by loop.  But it causes the label to
1042 	     be optimized away.  */
1043 	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */
1044 	  return reg;
1045 	}
1046     }
1047   else if (GET_CODE (orig) == CONST)
1048     {
1049       rtx base, offset;
1050 
1051       if (GET_CODE (XEXP (orig, 0)) == PLUS
1052 	  && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())
1053 	return orig;
1054 
1055       if (reg == 0)
1056 	{
1057 	  gcc_assert (can_create_pseudo_p ());
1058 	  reg = gen_reg_rtx (Pmode);
1059 	}
1060 
1061       gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
1062       base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
1063 					     reg);
1064       offset =
1065 	tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1066 					base == reg ? 0 : reg);
1067 
1068       if (CONST_INT_P (offset))
1069 	{
1070 	  if (can_create_pseudo_p ())
1071 	    offset = force_reg (Pmode, offset);
1072 	  else
1073 	    /* If we reach here, then something is seriously
1074 	       wrong.  */
1075 	    gcc_unreachable ();
1076 	}
1077 
1078       if (can_create_pseudo_p ())
1079 	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
1080       else
1081 	gcc_unreachable ();
1082     }
1083   else if (GET_CODE (orig) == LABEL_REF)
1084     {
1085       rtx address, temp_reg;
1086       rtx text_label_symbol;
1087       rtx text_label_rtx;
1088 
1089       if (reg == 0)
1090 	{
1091 	  gcc_assert (can_create_pseudo_p ());
1092 	  reg = gen_reg_rtx (Pmode);
1093 	}
1094 
1095       /* If not during reload, allocate another temp reg here for
1096          loading in the address, so that these instructions can be
1097          optimized properly.  */
1098       temp_reg = create_temp_reg_if_possible (Pmode, reg);
1099       text_label_symbol = tilepro_text_label_symbol ();
1100       text_label_rtx = tilepro_text_label_rtx ();
1101 
1102       emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
1103 				  text_label_symbol));
1104       emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
1105 				 text_label_symbol));
1106 
1107       /* Note: this is conservative.  We use the text_label but we
1108          don't use the pic_offset_table.  */
1109       crtl->uses_pic_offset_table = 1;
1110 
1111       address = temp_reg;
1112 
1113       emit_move_insn (reg, address);
1114 
1115       return reg;
1116     }
1117 
1118   return orig;
1119 }
1120 
1121 
1122 /* Implement TARGET_LEGITIMIZE_ADDRESS.  */
1123 static rtx
tilepro_legitimize_address(rtx x,rtx oldx ATTRIBUTE_UNUSED,machine_mode mode)1124 tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1125 			    machine_mode mode)
1126 {
1127   if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
1128       && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
1129     {
1130       return tilepro_legitimize_tls_address (x);
1131     }
1132   else if (flag_pic)
1133     {
1134       return tilepro_legitimize_pic_address (x, mode, 0);
1135     }
1136   else
1137     return x;
1138 }
1139 
1140 
1141 /* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
1142 static rtx
tilepro_delegitimize_address(rtx x)1143 tilepro_delegitimize_address (rtx x)
1144 {
1145   x = delegitimize_mem_from_attrs (x);
1146 
1147   if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
1148     {
1149       switch (XINT (XEXP (x, 0), 1))
1150 	{
1151 	case UNSPEC_PCREL_SYM:
1152 	case UNSPEC_GOT16_SYM:
1153 	case UNSPEC_GOT32_SYM:
1154 	case UNSPEC_TLS_GD:
1155 	case UNSPEC_TLS_IE:
1156 	  x = XVECEXP (XEXP (x, 0), 0, 0);
1157 	  break;
1158 	}
1159     }
1160 
1161   return x;
1162 }
1163 
1164 
1165 /* Emit code to load the PIC register.  */
1166 static void
load_pic_register(bool delay_pic_helper ATTRIBUTE_UNUSED)1167 load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
1168 {
1169   int orig_flag_pic = flag_pic;
1170 
1171   rtx got_symbol = tilepro_got_symbol ();
1172   rtx text_label_symbol = tilepro_text_label_symbol ();
1173   rtx text_label_rtx = tilepro_text_label_rtx ();
1174   flag_pic = 0;
1175 
1176   emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));
1177 
1178   emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
1179 			      text_label_rtx, got_symbol, text_label_symbol));
1180 
1181   emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
1182 			     tilepro_got_rtx (),
1183 			     got_symbol, text_label_symbol));
1184 
1185   flag_pic = orig_flag_pic;
1186 
1187   /* Need to emit this whether or not we obey regdecls, since
1188      setjmp/longjmp can cause life info to screw up.  ??? In the case
1189      where we don't obey regdecls, this is not sufficient since we may
1190      not fall out the bottom.  */
1191   emit_use (tilepro_got_rtx ());
1192 }
1193 
1194 
1195 /* Return the simd variant of the constant NUM of mode MODE, by
1196    replicating it to fill an interger of mode SImode.  NUM is first
1197    truncated to fit in MODE.  */
1198 rtx
tilepro_simd_int(rtx num,machine_mode mode)1199 tilepro_simd_int (rtx num, machine_mode mode)
1200 {
1201   HOST_WIDE_INT n = 0;
1202 
1203   gcc_assert (CONST_INT_P (num));
1204 
1205   n = INTVAL (num);
1206 
1207   switch (mode)
1208     {
1209     case QImode:
1210       n = 0x01010101 * (n & 0x000000FF);
1211       break;
1212     case HImode:
1213       n = 0x00010001 * (n & 0x0000FFFF);
1214       break;
1215     case SImode:
1216       break;
1217     case DImode:
1218       break;
1219     default:
1220       gcc_unreachable ();
1221     }
1222 
1223   return gen_int_si (n);
1224 }
1225 
1226 
1227 /* Split one or more DImode RTL references into pairs of SImode
1228    references.  The RTL can be REG, offsettable MEM, integer constant,
1229    or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
1230    to split and "num" is its length.  lo_half and hi_half are output
1231    arrays that parallel "operands".  */
1232 void
split_di(rtx operands[],int num,rtx lo_half[],rtx hi_half[])1233 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1234 {
1235   while (num--)
1236     {
1237       rtx op = operands[num];
1238 
1239       /* simplify_subreg refuse to split volatile memory addresses,
1240          but we still have to handle it.  */
1241       if (MEM_P (op))
1242 	{
1243 	  lo_half[num] = adjust_address (op, SImode, 0);
1244 	  hi_half[num] = adjust_address (op, SImode, 4);
1245 	}
1246       else
1247 	{
1248 	  lo_half[num] = simplify_gen_subreg (SImode, op,
1249 					      GET_MODE (op) == VOIDmode
1250 					      ? DImode : GET_MODE (op), 0);
1251 	  hi_half[num] = simplify_gen_subreg (SImode, op,
1252 					      GET_MODE (op) == VOIDmode
1253 					      ? DImode : GET_MODE (op), 4);
1254 	}
1255     }
1256 }
1257 
1258 
1259 /* Returns true iff val can be moved into a register in one
1260    instruction.  And if it can, it emits the code to move the
1261    constant.
1262 
1263    If three_wide_only is true, this insists on an instruction that
1264    works in a bundle containing three instructions.  */
1265 static bool
expand_set_cint32_one_inst(rtx dest_reg,HOST_WIDE_INT val,bool three_wide_only)1266 expand_set_cint32_one_inst (rtx dest_reg,
1267 			    HOST_WIDE_INT val, bool three_wide_only)
1268 {
1269   val = trunc_int_for_mode (val, SImode);
1270 
1271   if (val == trunc_int_for_mode (val, QImode))
1272     {
1273       /* Success! */
1274       emit_move_insn (dest_reg, GEN_INT (val));
1275       return true;
1276     }
1277   else if (!three_wide_only)
1278     {
1279       rtx imm_op = GEN_INT (val);
1280 
1281       if (satisfies_constraint_J (imm_op)
1282 	  || satisfies_constraint_K (imm_op)
1283 	  || satisfies_constraint_N (imm_op)
1284 	  || satisfies_constraint_P (imm_op))
1285 	{
1286 	  emit_move_insn (dest_reg, imm_op);
1287 	  return true;
1288 	}
1289     }
1290 
1291   return false;
1292 }
1293 
1294 
1295 /* Implement SImode rotatert.  */
1296 static HOST_WIDE_INT
rotate_right(HOST_WIDE_INT n,int count)1297 rotate_right (HOST_WIDE_INT n, int count)
1298 {
1299   unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
1300   if (count == 0)
1301     return x;
1302   return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
1303 }
1304 
1305 
1306 /* Return true iff n contains exactly one contiguous sequence of 1
1307    bits, possibly wrapping around from high bits to low bits.  */
1308 bool
tilepro_bitfield_operand_p(HOST_WIDE_INT n,int * first_bit,int * last_bit)1309 tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
1310 {
1311   int i;
1312 
1313   if (n == 0)
1314     return false;
1315 
1316   for (i = 0; i < 32; i++)
1317     {
1318       unsigned HOST_WIDE_INT x = rotate_right (n, i);
1319       if (!(x & 1))
1320 	continue;
1321 
1322       /* See if x is a power of two minus one, i.e. only consecutive 1
1323          bits starting from bit 0.  */
1324       if ((x & (x + 1)) == 0)
1325 	{
1326 	  if (first_bit != NULL)
1327 	    *first_bit = i;
1328 	  if (last_bit != NULL)
1329 	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
1330 
1331 	  return true;
1332 	}
1333     }
1334 
1335   return false;
1336 }
1337 
1338 
1339 /* Create code to move the CONST_INT value in src_val to dest_reg.  */
1340 static void
expand_set_cint32(rtx dest_reg,rtx src_val)1341 expand_set_cint32 (rtx dest_reg, rtx src_val)
1342 {
1343   HOST_WIDE_INT val;
1344   int leading_zeroes, trailing_zeroes;
1345   int lower, upper;
1346   int three_wide_only;
1347   rtx temp;
1348 
1349   gcc_assert (CONST_INT_P (src_val));
1350   val = trunc_int_for_mode (INTVAL (src_val), SImode);
1351 
1352   /* See if we can generate the constant in one instruction.  */
1353   if (expand_set_cint32_one_inst (dest_reg, val, false))
1354     return;
1355 
1356   /* Create a temporary variable to hold a partial result, to enable
1357      CSE.  */
1358   temp = create_temp_reg_if_possible (SImode, dest_reg);
1359 
1360   leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
1361   trailing_zeroes = exact_log2 (val & -val);
1362 
1363   lower = trunc_int_for_mode (val, HImode);
1364   upper = trunc_int_for_mode ((val - lower) >> 16, HImode);
1365 
1366   /* First try all three-wide instructions that generate a constant
1367      (i.e. movei) followed by various shifts and rotates. If none of
1368      those work, try various two-wide ways of generating a constant
1369      followed by various shifts and rotates.  */
1370   for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
1371     {
1372       int count;
1373 
1374       if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,
1375 				      three_wide_only))
1376 	{
1377 	  /* 0xFFFFA500 becomes:
1378 	     movei temp, 0xFFFFFFA5
1379 	     shli dest, temp, 8  */
1380 	  emit_move_insn (dest_reg,
1381 			  gen_rtx_ASHIFT (SImode, temp,
1382 					  GEN_INT (trailing_zeroes)));
1383 	  return;
1384 	}
1385 
1386       if (expand_set_cint32_one_inst (temp, val << leading_zeroes,
1387 				      three_wide_only))
1388 	{
1389 	  /* 0x7FFFFFFF becomes:
1390 	     movei temp, -2
1391 	     shri dest, temp, 1  */
1392 	  emit_move_insn (dest_reg,
1393 			  gen_rtx_LSHIFTRT (SImode, temp,
1394 					    GEN_INT (leading_zeroes)));
1395 	  return;
1396 	}
1397 
1398       /* Try rotating a one-instruction immediate, since rotate is
1399          3-wide.  */
1400       for (count = 1; count < 32; count++)
1401 	{
1402 	  HOST_WIDE_INT r = rotate_right (val, count);
1403 	  if (expand_set_cint32_one_inst (temp, r, three_wide_only))
1404 	    {
1405 	      /* 0xFFA5FFFF becomes:
1406 	         movei temp, 0xFFFFFFA5
1407 	         rli dest, temp, 16  */
1408 	      emit_move_insn (dest_reg,
1409 			      gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));
1410 	      return;
1411 	    }
1412 	}
1413 
1414       if (lower == trunc_int_for_mode (lower, QImode))
1415 	{
1416 	  /* We failed to use two 3-wide instructions, but the low 16
1417 	     bits are a small number so just use a 2-wide + 3-wide
1418 	     auli + addi pair rather than anything more exotic.
1419 
1420 	     0x12340056 becomes:
1421 	     auli temp, zero, 0x1234
1422 	     addi dest, temp, 0x56  */
1423 	  break;
1424 	}
1425     }
1426 
1427   /* Fallback case: use a auli + addli/addi pair.  */
1428   emit_move_insn (temp, GEN_INT (upper << 16));
1429   emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
1430 }
1431 
1432 
1433 /* Load OP1, a 32-bit constant, into OP0, a register.  We know it
1434    can't be done in one insn when we get here, the move expander
1435    guarantees this.  */
1436 void
tilepro_expand_set_const32(rtx op0,rtx op1)1437 tilepro_expand_set_const32 (rtx op0, rtx op1)
1438 {
1439   machine_mode mode = GET_MODE (op0);
1440   rtx temp;
1441 
1442   if (CONST_INT_P (op1))
1443     {
1444       /* TODO: I don't know if we want to split large constants now,
1445          or wait until later (with a define_split).
1446 
1447          Does splitting early help CSE?  Does it harm other
1448          optimizations that might fold loads? */
1449       expand_set_cint32 (op0, op1);
1450     }
1451   else
1452     {
1453       temp = create_temp_reg_if_possible (mode, op0);
1454 
1455       /* A symbol, emit in the traditional way.  */
1456       emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
1457       emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
1458     }
1459 }
1460 
1461 
1462 /* Expand a move instruction.  Return true if all work is done.  */
1463 bool
tilepro_expand_mov(machine_mode mode,rtx * operands)1464 tilepro_expand_mov (machine_mode mode, rtx *operands)
1465 {
1466   /* Handle sets of MEM first.  */
1467   if (MEM_P (operands[0]))
1468     {
1469       if (can_create_pseudo_p ())
1470 	operands[0] = validize_mem (operands[0]);
1471 
1472       if (reg_or_0_operand (operands[1], mode))
1473 	return false;
1474 
1475       if (!reload_in_progress)
1476 	operands[1] = force_reg (mode, operands[1]);
1477     }
1478 
1479   /* Fixup TLS cases.  */
1480   if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
1481     {
1482       operands[1] = tilepro_legitimize_tls_address (operands[1]);
1483       return false;
1484     }
1485 
1486   /* Fixup PIC cases.  */
1487   if (flag_pic && CONSTANT_P (operands[1]))
1488     {
1489       if (tilepro_pic_address_needs_scratch (operands[1]))
1490 	operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);
1491 
1492       if (symbolic_operand (operands[1], mode))
1493 	{
1494 	  operands[1] = tilepro_legitimize_pic_address (operands[1],
1495 							mode,
1496 							(reload_in_progress ?
1497 							 operands[0] :
1498 							 NULL_RTX));
1499 	  return false;
1500 	}
1501     }
1502 
1503   /* Fixup for UNSPEC addresses.  */
1504   if (flag_pic
1505       && GET_CODE (operands[1]) == HIGH
1506       && GET_CODE (XEXP (operands[1], 0)) == CONST
1507       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
1508     {
1509       rtx unspec = XEXP (XEXP (operands[1], 0), 0);
1510       int unspec_num = XINT (unspec, 1);
1511       if (unspec_num == UNSPEC_PCREL_SYM)
1512 	{
1513 	  emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
1514 				     XVECEXP (unspec, 0, 0),
1515 				     XVECEXP (unspec, 0, 1)));
1516 	  return true;
1517 	}
1518       else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
1519 	{
1520 	  emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
1521 				      XVECEXP (unspec, 0, 0)));
1522 	  return true;
1523 	}
1524       else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
1525 	{
1526 	  emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
1527 				       XVECEXP (unspec, 0, 0)));
1528 	  return true;
1529 	}
1530       else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
1531 	{
1532 	  emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
1533 				       XVECEXP (unspec, 0, 0)));
1534 	  return true;
1535 	}
1536       else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
1537 	{
1538 	  emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
1539 				       XVECEXP (unspec, 0, 0)));
1540 	  return true;
1541 	}
1542     }
1543 
1544   /* Accept non-constants and valid constants unmodified.  */
1545   if (!CONSTANT_P (operands[1])
1546       || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))
1547     return false;
1548 
1549   /* Split large integers.  */
1550   if (GET_MODE_SIZE (mode) <= 4)
1551     {
1552       tilepro_expand_set_const32 (operands[0], operands[1]);
1553       return true;
1554     }
1555 
1556   return false;
1557 }
1558 
1559 
1560 /* Expand the "insv" pattern.  */
1561 void
tilepro_expand_insv(rtx operands[4])1562 tilepro_expand_insv (rtx operands[4])
1563 {
1564   rtx first_rtx = operands[2];
1565   HOST_WIDE_INT first = INTVAL (first_rtx);
1566   HOST_WIDE_INT width = INTVAL (operands[1]);
1567   rtx v = operands[3];
1568 
1569   /* Shift the inserted bits into position.  */
1570   if (first != 0)
1571     {
1572       if (CONST_INT_P (v))
1573 	{
1574 	  /* Shift the constant into mm position.  */
1575 	  v = gen_int_si (INTVAL (v) << first);
1576 	}
1577       else
1578 	{
1579 	  /* Shift over the value to be inserted.  */
1580 	  rtx tmp = gen_reg_rtx (SImode);
1581 	  emit_insn (gen_ashlsi3 (tmp, v, first_rtx));
1582 	  v = tmp;
1583 	}
1584     }
1585 
1586   /* Insert the shifted bits using an 'mm' insn.  */
1587   emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
1588 			  GEN_INT (first + width - 1)));
1589 }
1590 
1591 
1592 /* Expand unaligned loads.  */
1593 void
tilepro_expand_unaligned_load(rtx dest_reg,rtx mem,HOST_WIDE_INT bitsize,HOST_WIDE_INT bit_offset,bool sign)1594 tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
1595 			       HOST_WIDE_INT bit_offset, bool sign)
1596 {
1597   machine_mode mode;
1598   rtx addr_lo, addr_hi;
1599   rtx mem_lo, mem_hi, hi;
1600   rtx mema, wide_result;
1601   int last_byte_offset;
1602   HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1603 
1604   mode = GET_MODE (dest_reg);
1605 
1606   hi = gen_reg_rtx (mode);
1607 
1608   if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
1609     {
1610       rtx lo;
1611 
1612       /* When just loading a two byte value, we can load the two bytes
1613          individually and combine them efficiently.  */
1614 
1615       mem_lo = adjust_address (mem, QImode, byte_offset);
1616       mem_hi = adjust_address (mem, QImode, byte_offset + 1);
1617 
1618       lo = gen_reg_rtx (mode);
1619       emit_insn (gen_zero_extendqisi2 (lo, mem_lo));
1620 
1621       if (sign)
1622 	{
1623 	  rtx tmp = gen_reg_rtx (mode);
1624 
1625 	  /* Do a signed load of the second byte then shift and OR it
1626 	     in.  */
1627 	  emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
1628 	  emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
1629 				  gen_lowpart (SImode, hi), GEN_INT (8)));
1630 	  emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
1631 				 gen_lowpart (SImode, lo),
1632 				 gen_lowpart (SImode, tmp)));
1633 	}
1634       else
1635 	{
1636 	  /* Do two unsigned loads and use intlb to interleave
1637 	     them.  */
1638 	  emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
1639 	  emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
1640 				     gen_lowpart (SImode, hi),
1641 				     gen_lowpart (SImode, lo)));
1642 	}
1643 
1644       return;
1645     }
1646 
1647   mema = XEXP (mem, 0);
1648 
1649   /* AND addresses cannot be in any alias set, since they may
1650      implicitly alias surrounding code.  Ideally we'd have some alias
1651      set that covered all types except those with alignment 8 or
1652      higher.  */
1653   addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
1654   mem_lo = change_address (mem, mode,
1655 			   gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
1656   set_mem_alias_set (mem_lo, 0);
1657 
1658   /* Load the high word at an address that will not fault if the low
1659      address is aligned and at the very end of a page.  */
1660   last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
1661   addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
1662   mem_hi = change_address (mem, mode,
1663 			   gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
1664   set_mem_alias_set (mem_hi, 0);
1665 
1666   if (bitsize == 32)
1667     {
1668       addr_lo = make_safe_from (addr_lo, dest_reg);
1669       wide_result = dest_reg;
1670     }
1671   else
1672     {
1673       wide_result = gen_reg_rtx (mode);
1674     }
1675 
1676   /* Load hi first in case dest_reg is used in mema.  */
1677   emit_move_insn (hi, mem_hi);
1678   emit_move_insn (wide_result, mem_lo);
1679 
1680   emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
1681 				   gen_lowpart (SImode, wide_result),
1682 				   gen_lowpart (SImode, hi), addr_lo));
1683 
1684   if (bitsize != 32)
1685     {
1686       rtx extracted =
1687 	extract_bit_field (gen_lowpart (SImode, wide_result),
1688 			   bitsize, bit_offset % BITS_PER_UNIT,
1689 			   !sign, gen_lowpart (SImode, dest_reg),
1690 			   SImode, SImode, false);
1691 
1692       if (extracted != dest_reg)
1693 	emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
1694     }
1695 }
1696 
1697 
1698 /* Expand unaligned stores.  */
1699 static void
tilepro_expand_unaligned_store(rtx mem,rtx src,HOST_WIDE_INT bitsize,HOST_WIDE_INT bit_offset)1700 tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
1701 				HOST_WIDE_INT bit_offset)
1702 {
1703   HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1704   HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
1705   HOST_WIDE_INT shift_amt;
1706   HOST_WIDE_INT i;
1707   rtx mem_addr;
1708   rtx store_val;
1709 
1710   for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
1711     {
1712       mem_addr = adjust_address (mem, QImode, byte_offset + i);
1713 
1714       if (shift_amt)
1715 	{
1716 	  store_val = expand_simple_binop (SImode, LSHIFTRT,
1717 					   gen_lowpart (SImode, src),
1718 					   GEN_INT (shift_amt), NULL, 1,
1719 					   OPTAB_LIB_WIDEN);
1720 	  store_val = gen_lowpart (QImode, store_val);
1721 	}
1722       else
1723 	{
1724 	  store_val = gen_lowpart (QImode, src);
1725 	}
1726 
1727       emit_move_insn (mem_addr, store_val);
1728     }
1729 }
1730 
1731 
1732 /* Implement the movmisalign patterns.  One of the operands is a
1733    memory that is not naturally aligned.  Emit instructions to load
1734    it.  */
1735 void
tilepro_expand_movmisalign(machine_mode mode,rtx * operands)1736 tilepro_expand_movmisalign (machine_mode mode, rtx *operands)
1737 {
1738   if (MEM_P (operands[1]))
1739     {
1740       rtx tmp;
1741 
1742       if (register_operand (operands[0], mode))
1743 	tmp = operands[0];
1744       else
1745 	tmp = gen_reg_rtx (mode);
1746 
1747       tilepro_expand_unaligned_load (tmp, operands[1],
1748 				     GET_MODE_BITSIZE (mode), 0, true);
1749 
1750       if (tmp != operands[0])
1751 	emit_move_insn (operands[0], tmp);
1752     }
1753   else if (MEM_P (operands[0]))
1754     {
1755       if (!reg_or_0_operand (operands[1], mode))
1756 	operands[1] = force_reg (mode, operands[1]);
1757 
1758       tilepro_expand_unaligned_store (operands[0], operands[1],
1759 				      GET_MODE_BITSIZE (mode), 0);
1760     }
1761   else
1762     gcc_unreachable ();
1763 }
1764 
1765 
1766 /* Implement the addsi3 pattern.  */
1767 bool
tilepro_expand_addsi(rtx op0,rtx op1,rtx op2)1768 tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)
1769 {
1770   rtx temp;
1771   HOST_WIDE_INT n;
1772   HOST_WIDE_INT high;
1773 
1774   /* Skip anything that only takes one instruction.  */
1775   if (add_operand (op2, SImode))
1776     return false;
1777 
1778   /* We can only optimize ints here (it should be impossible to get
1779      here with any other type, but it is harmless to check.  */
1780   if (!CONST_INT_P (op2))
1781     return false;
1782 
1783   temp = create_temp_reg_if_possible (SImode, op0);
1784   n = INTVAL (op2);
1785   high = (n + (n & 0x8000)) & ~0xffff;
1786 
1787   emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
1788   emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
1789 
1790   return true;
1791 }
1792 
1793 
1794 /* Implement the allocate_stack pattern (alloca).  */
1795 void
tilepro_allocate_stack(rtx op0,rtx op1)1796 tilepro_allocate_stack (rtx op0, rtx op1)
1797 {
1798   /* Technically the correct way to initialize chain_loc is with
1799    * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
1800    * sets the alias_set to that of a frame reference.  Some of our
1801    * tests rely on some unsafe assumption about when the chaining
1802    * update is done, we need to be conservative about reordering the
1803    * chaining instructions.
1804    */
1805   rtx fp_addr = gen_reg_rtx (Pmode);
1806   rtx fp_value = gen_reg_rtx (Pmode);
1807   rtx fp_loc;
1808 
1809   emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1810 					 GEN_INT (UNITS_PER_WORD)));
1811 
1812   fp_loc = gen_frame_mem (Pmode, fp_addr);
1813 
1814   emit_move_insn (fp_value, fp_loc);
1815 
1816   op1 = force_reg (Pmode, op1);
1817 
1818   emit_move_insn (stack_pointer_rtx,
1819 		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));
1820 
1821   emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1822 					 GEN_INT (UNITS_PER_WORD)));
1823 
1824   fp_loc = gen_frame_mem (Pmode, fp_addr);
1825 
1826   emit_move_insn (fp_loc, fp_value);
1827 
1828   emit_move_insn (op0, virtual_stack_dynamic_rtx);
1829 }
1830 
1831 
1832 
1833 /* Multiplies */
1834 
1835 /* Returns the insn_code in ENTRY.  */
1836 static enum insn_code
tilepro_multiply_get_opcode(const struct tilepro_multiply_insn_seq_entry * entry)1837 tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
1838 			     *entry)
1839 {
1840   return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
1841 }
1842 
1843 
1844 /* Returns the length of the 'op' array.  */
1845 static int
tilepro_multiply_get_num_ops(const struct tilepro_multiply_insn_seq * seq)1846 tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
1847 {
1848   /* The array either uses all of its allocated slots or is terminated
1849      by a bogus opcode. Either way, the array size is the index of the
1850      last valid opcode plus one.  */
1851   int i;
1852   for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
1853     if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
1854       return i + 1;
1855 
1856   /* An empty array is not allowed.  */
1857   gcc_unreachable ();
1858 }
1859 
1860 
1861 /* We precompute a number of expression trees for multiplying by
1862    constants.  This generates code for such an expression tree by
1863    walking through the nodes in the tree (which are conveniently
1864    pre-linearized) and emitting an instruction for each one.  */
1865 static void
tilepro_expand_constant_multiply_given_sequence(rtx result,rtx src,const struct tilepro_multiply_insn_seq * seq)1866 tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
1867 						 const struct
1868 						 tilepro_multiply_insn_seq
1869 						 *seq)
1870 {
1871   int i;
1872   int num_ops;
1873 
1874   /* Keep track of the subexpressions computed so far, so later
1875      instructions can refer to them.  We seed the array with zero and
1876      the value being multiplied.  */
1877   int num_subexprs = 2;
1878   rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
1879   subexprs[0] = const0_rtx;
1880   subexprs[1] = src;
1881 
1882   /* Determine how many instructions we are going to generate.  */
1883   num_ops = tilepro_multiply_get_num_ops (seq);
1884   gcc_assert (num_ops > 0
1885 	      && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);
1886 
1887   for (i = 0; i < num_ops; i++)
1888     {
1889       const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];
1890 
1891       /* Figure out where to store the output of this instruction.  */
1892       const bool is_last_op = (i + 1 == num_ops);
1893       rtx out = is_last_op ? result : gen_reg_rtx (SImode);
1894 
1895       enum insn_code opcode = tilepro_multiply_get_opcode (entry);
1896       if (opcode == CODE_FOR_ashlsi3)
1897 	{
1898 	  /* Handle shift by immediate. This is a special case because
1899 	     the meaning of the second operand is a constant shift
1900 	     count rather than an operand index.  */
1901 
1902 	  /* Make sure the shift count is in range. Zero should not
1903 	     happen.  */
1904 	  const int shift_count = entry->rhs;
1905 	  gcc_assert (shift_count > 0 && shift_count < 32);
1906 
1907 	  /* Emit the actual instruction.  */
1908 	  emit_insn (GEN_FCN (opcode)
1909 		     (out, subexprs[entry->lhs],
1910 		      gen_rtx_CONST_INT (SImode, shift_count)));
1911 	}
1912       else
1913 	{
1914 	  /* Handle a normal two-operand instruction, such as add or
1915 	     s1a.  */
1916 
1917 	  /* Make sure we are referring to a previously computed
1918 	     subexpression.  */
1919 	  gcc_assert (entry->rhs < num_subexprs);
1920 
1921 	  /* Emit the actual instruction.  */
1922 	  emit_insn (GEN_FCN (opcode)
1923 		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
1924 	}
1925 
1926       /* Record this subexpression for use by later expressions.  */
1927       subexprs[num_subexprs++] = out;
1928     }
1929 }
1930 
1931 
1932 /* bsearch helper function.  */
1933 static int
tilepro_compare_multipliers(const void * key,const void * t)1934 tilepro_compare_multipliers (const void *key, const void *t)
1935 {
1936   return *(const int *) key -
1937     ((const struct tilepro_multiply_insn_seq *) t)->multiplier;
1938 }
1939 
1940 
1941 /* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
1942    none exists.  */
1943 static const struct tilepro_multiply_insn_seq *
tilepro_find_multiply_insn_seq_for_constant(int multiplier)1944 tilepro_find_multiply_insn_seq_for_constant (int multiplier)
1945 {
1946   return ((const struct tilepro_multiply_insn_seq *)
1947 	  bsearch (&multiplier, tilepro_multiply_insn_seq_table,
1948 		   tilepro_multiply_insn_seq_table_size,
1949 		   sizeof tilepro_multiply_insn_seq_table[0],
1950 		   tilepro_compare_multipliers));
1951 }
1952 
1953 
1954 /* Try to a expand constant multiply in SImode by looking it up in a
1955    precompiled table.  OP0 is the result operand, OP1 is the source
1956    operand, and MULTIPLIER is the value of the constant.  Return true
1957    if it succeeds.  */
1958 static bool
tilepro_expand_const_mulsi(rtx op0,rtx op1,int multiplier)1959 tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
1960 {
1961   /* See if we have precomputed an efficient way to multiply by this
1962      constant.  */
1963   const struct tilepro_multiply_insn_seq *seq =
1964     tilepro_find_multiply_insn_seq_for_constant (multiplier);
1965   if (seq != NULL)
1966     {
1967       tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
1968       return true;
1969     }
1970   else
1971     return false;
1972 }
1973 
1974 
1975 /* Expand the mulsi pattern.  */
1976 bool
tilepro_expand_mulsi(rtx op0,rtx op1,rtx op2)1977 tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
1978 {
1979   if (CONST_INT_P (op2))
1980     {
1981       HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
1982       return tilepro_expand_const_mulsi (op0, op1, n);
1983     }
1984   return false;
1985 }
1986 
1987 
1988 /* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
1989    operands, and SIGN is true if it's a signed multiply, and false if
1990    it's an unsigned multiply.  */
1991 static void
tilepro_expand_high_multiply(rtx result,rtx op1,rtx op2,bool sign)1992 tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
1993 {
1994   rtx tmp0 = gen_reg_rtx (SImode);
1995   rtx tmp1 = gen_reg_rtx (SImode);
1996   rtx tmp2 = gen_reg_rtx (SImode);
1997   rtx tmp3 = gen_reg_rtx (SImode);
1998   rtx tmp4 = gen_reg_rtx (SImode);
1999   rtx tmp5 = gen_reg_rtx (SImode);
2000   rtx tmp6 = gen_reg_rtx (SImode);
2001   rtx tmp7 = gen_reg_rtx (SImode);
2002   rtx tmp8 = gen_reg_rtx (SImode);
2003   rtx tmp9 = gen_reg_rtx (SImode);
2004   rtx tmp10 = gen_reg_rtx (SImode);
2005   rtx tmp11 = gen_reg_rtx (SImode);
2006   rtx tmp12 = gen_reg_rtx (SImode);
2007   rtx tmp13 = gen_reg_rtx (SImode);
2008   rtx result_lo = gen_reg_rtx (SImode);
2009 
2010   if (sign)
2011     {
2012       emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
2013       emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
2014       emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
2015       emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));
2016     }
2017   else
2018     {
2019       emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
2020       emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
2021       emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
2022       emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));
2023     }
2024 
2025   emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));
2026 
2027   emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));
2028 
2029   emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
2030   emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));
2031 
2032   emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
2033   emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));
2034 
2035   if (sign)
2036     {
2037       emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
2038       emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));
2039     }
2040   else
2041     {
2042       emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
2043       emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));
2044     }
2045 
2046   emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
2047   emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
2048   emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
2049   emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
2050 }
2051 
2052 
2053 /* Implement smulsi3_highpart.  */
2054 void
tilepro_expand_smulsi3_highpart(rtx op0,rtx op1,rtx op2)2055 tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
2056 {
2057   tilepro_expand_high_multiply (op0, op1, op2, true);
2058 }
2059 
2060 
2061 /* Implement umulsi3_highpart.  */
2062 void
tilepro_expand_umulsi3_highpart(rtx op0,rtx op1,rtx op2)2063 tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
2064 {
2065   tilepro_expand_high_multiply (op0, op1, op2, false);
2066 }
2067 
2068 
2069 
2070 /* Compare and branches  */
2071 
2072 /* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
2073 static bool
tilepro_emit_setcc_internal_di(rtx res,enum rtx_code code,rtx op0,rtx op1)2074 tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
2075 {
2076   rtx operands[2], lo_half[2], hi_half[2];
2077   rtx tmp, tmp0, tmp1, tmp2;
2078   bool swap = false;
2079 
2080   /* Reduce the number of cases we need to handle by reversing the
2081      operands.  */
2082   switch (code)
2083     {
2084     case EQ:
2085     case NE:
2086     case LE:
2087     case LT:
2088     case LEU:
2089     case LTU:
2090       /* We handle these compares directly.  */
2091       break;
2092 
2093     case GE:
2094     case GT:
2095     case GEU:
2096     case GTU:
2097       /* Reverse the operands.  */
2098       swap = true;
2099       break;
2100 
2101     default:
2102       /* We should not have called this with any other code.  */
2103       gcc_unreachable ();
2104     }
2105 
2106   if (swap)
2107     {
2108       code = swap_condition (code);
2109       tmp = op0, op0 = op1, op1 = tmp;
2110     }
2111 
2112   operands[0] = op0;
2113   operands[1] = op1;
2114 
2115   split_di (operands, 2, lo_half, hi_half);
2116 
2117   if (!reg_or_0_operand (lo_half[0], SImode))
2118     lo_half[0] = force_reg (SImode, lo_half[0]);
2119 
2120   if (!reg_or_0_operand (hi_half[0], SImode))
2121     hi_half[0] = force_reg (SImode, hi_half[0]);
2122 
2123   if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
2124     lo_half[1] = force_reg (SImode, lo_half[1]);
2125 
2126   if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
2127     hi_half[1] = force_reg (SImode, hi_half[1]);
2128 
2129   tmp0 = gen_reg_rtx (SImode);
2130   tmp1 = gen_reg_rtx (SImode);
2131   tmp2 = gen_reg_rtx (SImode);
2132 
2133   switch (code)
2134     {
2135     case EQ:
2136       emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
2137       emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2138       emit_insn (gen_andsi3 (res, tmp0, tmp1));
2139       return true;
2140       break;
2141     case NE:
2142       emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
2143       emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
2144       emit_insn (gen_iorsi3 (res, tmp0, tmp1));
2145       return true;
2146       break;
2147     case LE:
2148       emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
2149       emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2150       emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
2151       emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2152       return true;
2153     case LT:
2154       if (operands[1] == const0_rtx)
2155 	{
2156 	  emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));
2157 	  return true;
2158 	}
2159       else
2160 	{
2161 	  emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
2162 	  emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2163 	  emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
2164 	  emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2165 	}
2166       return true;
2167     case LEU:
2168       emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
2169       emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2170       emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
2171       emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2172       return true;
2173     case LTU:
2174       emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
2175       emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
2176       emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
2177       emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
2178       return true;
2179     default:
2180       gcc_unreachable ();
2181     }
2182 
2183   return false;
2184 }
2185 
2186 
2187 /* Certain simplifications can be done to make invalid setcc
2188    operations valid.  Return the final comparison, or NULL if we can't
2189    work.  */
2190 static bool
tilepro_emit_setcc_internal(rtx res,enum rtx_code code,rtx op0,rtx op1,machine_mode cmp_mode)2191 tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
2192 			     machine_mode cmp_mode)
2193 {
2194   rtx tmp;
2195   bool swap = false;
2196 
2197   if (cmp_mode == DImode)
2198     {
2199       return tilepro_emit_setcc_internal_di (res, code, op0, op1);
2200     }
2201 
2202   /* The general case: fold the comparison code to the types of
2203      compares that we have, choosing the branch as necessary.  */
2204 
2205   switch (code)
2206     {
2207     case EQ:
2208     case NE:
2209     case LE:
2210     case LT:
2211     case LEU:
2212     case LTU:
2213       /* We have these compares.  */
2214       break;
2215 
2216     case GE:
2217     case GT:
2218     case GEU:
2219     case GTU:
2220       /* We do not have these compares, so we reverse the
2221          operands.  */
2222       swap = true;
2223       break;
2224 
2225     default:
2226       /* We should not have called this with any other code.  */
2227       gcc_unreachable ();
2228     }
2229 
2230   if (swap)
2231     {
2232       code = swap_condition (code);
2233       tmp = op0, op0 = op1, op1 = tmp;
2234     }
2235 
2236   if (!reg_or_0_operand (op0, SImode))
2237     op0 = force_reg (SImode, op0);
2238 
2239   if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
2240     op1 = force_reg (SImode, op1);
2241 
2242   /* Return the setcc comparison.  */
2243   emit_insn (gen_rtx_SET (res, gen_rtx_fmt_ee (code, SImode, op0, op1)));
2244 
2245   return true;
2246 }
2247 
2248 
2249 /* Implement cstore patterns.  */
2250 bool
tilepro_emit_setcc(rtx operands[],machine_mode cmp_mode)2251 tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)
2252 {
2253   return
2254     tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
2255 				 operands[2], operands[3], cmp_mode);
2256 }
2257 
2258 
2259 /* Return whether CODE is a signed comparison.  */
2260 static bool
signed_compare_p(enum rtx_code code)2261 signed_compare_p (enum rtx_code code)
2262 {
2263   return (code == EQ || code == NE || code == LT || code == LE
2264 	  || code == GT || code == GE);
2265 }
2266 
2267 
2268 /* Generate the comparison for an SImode conditional branch.  */
2269 static rtx
tilepro_emit_cc_test(enum rtx_code code,rtx op0,rtx op1,machine_mode cmp_mode,bool eq_ne_only)2270 tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
2271 		      machine_mode cmp_mode, bool eq_ne_only)
2272 {
2273   enum rtx_code branch_code;
2274   rtx temp;
2275 
2276   /* Check for a compare against zero using a comparison we can do
2277      directly.  */
2278   if (cmp_mode != DImode
2279       && op1 == const0_rtx
2280       && (code == EQ || code == NE
2281 	  || (!eq_ne_only && signed_compare_p (code))))
2282     {
2283       op0 = force_reg (SImode, op0);
2284       return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
2285     }
2286 
2287   /* The general case: fold the comparison code to the types of
2288      compares that we have, choosing the branch as necessary.  */
2289   switch (code)
2290     {
2291     case EQ:
2292     case LE:
2293     case LT:
2294     case LEU:
2295     case LTU:
2296       /* We have these compares.  */
2297       branch_code = NE;
2298       break;
2299 
2300     case NE:
2301     case GE:
2302     case GT:
2303     case GEU:
2304     case GTU:
2305       /* These must be reversed (except NE, but let's
2306          canonicalize).  */
2307       code = reverse_condition (code);
2308       branch_code = EQ;
2309       break;
2310 
2311     default:
2312       gcc_unreachable ();
2313     }
2314 
2315   if (cmp_mode != DImode
2316       && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
2317     {
2318       HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);
2319 
2320       switch (code)
2321 	{
2322 	case EQ:
2323 	  /* Subtract off the value we want to compare against and see
2324 	     if we get zero.  This is cheaper than creating a constant
2325 	     in a register. Except that subtracting -128 is more
2326 	     expensive than seqi to -128, so we leave that alone.  */
2327 	  /* ??? Don't do this when comparing against symbols,
2328 	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
2329 	     0), which will be declared false out of hand (at least
2330 	     for non-weak).  */
2331 	  if (!(symbolic_operand (op0, VOIDmode)
2332 		|| (REG_P (op0) && REG_POINTER (op0))))
2333 	    {
2334 	      /* To compare against MIN_INT, we add MIN_INT and check
2335 	         for 0.  */
2336 	      HOST_WIDE_INT add;
2337 	      if (n != -2147483647 - 1)
2338 		add = -n;
2339 	      else
2340 		add = n;
2341 
2342 	      op0 = force_reg (SImode, op0);
2343 	      temp = gen_reg_rtx (SImode);
2344 	      emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
2345 	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
2346 				     VOIDmode, temp, const0_rtx);
2347 	    }
2348 	  break;
2349 
2350 	case LEU:
2351 	  if (n == -1)
2352 	    break;
2353 	  /* FALLTHRU */
2354 
2355 	case LTU:
2356 	  /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
2357 	     etc.  */
2358 	  {
2359 	    int first = exact_log2 (code == LTU ? n : n + 1);
2360 	    if (first != -1)
2361 	      {
2362 		op0 = force_reg (SImode, op0);
2363 		temp = gen_reg_rtx (SImode);
2364 		emit_move_insn (temp,
2365 				gen_rtx_LSHIFTRT (SImode, op0,
2366 						  gen_int_si (first)));
2367 		return gen_rtx_fmt_ee (reverse_condition (branch_code),
2368 				       VOIDmode, temp, const0_rtx);
2369 	      }
2370 	  }
2371 	  break;
2372 
2373 	default:
2374 	  break;
2375 	}
2376     }
2377 
2378   /* Compute a flag saying whether we should branch.  */
2379   temp = gen_reg_rtx (SImode);
2380   tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);
2381 
2382   /* Return the branch comparison.  */
2383   return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
2384 }
2385 
2386 
2387 /* Generate the comparison for a conditional branch.  */
2388 void
tilepro_emit_conditional_branch(rtx operands[],machine_mode cmp_mode)2389 tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
2390 {
2391   rtx cmp_rtx =
2392     tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2393 			  cmp_mode, false);
2394   rtx branch_rtx = gen_rtx_SET (pc_rtx,
2395 				gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2396 						      gen_rtx_LABEL_REF
2397 						      (VOIDmode,
2398 						       operands[3]),
2399 						      pc_rtx));
2400   emit_jump_insn (branch_rtx);
2401 }
2402 
2403 
2404 /* Implement the movsicc pattern.  */
2405 rtx
tilepro_emit_conditional_move(rtx cmp)2406 tilepro_emit_conditional_move (rtx cmp)
2407 {
2408   return
2409     tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
2410 			  GET_MODE (XEXP (cmp, 0)), true);
2411 }
2412 
2413 
2414 /* Return true if INSN is annotated with a REG_BR_PROB note that
2415    indicates it's a branch that's predicted taken.  */
2416 static bool
cbranch_predicted_p(rtx_insn * insn)2417 cbranch_predicted_p (rtx_insn *insn)
2418 {
2419   rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2420 
2421   if (x)
2422     {
2423       int pred_val = XINT (x, 0);
2424 
2425       return pred_val >= REG_BR_PROB_BASE / 2;
2426     }
2427 
2428   return false;
2429 }
2430 
2431 
2432 /* Output assembly code for a specific branch instruction, appending
2433    the branch prediction flag to the opcode if appropriate.  */
2434 static const char *
tilepro_output_simple_cbranch_with_opcode(rtx_insn * insn,const char * opcode,int regop,bool netreg_p,bool reverse_predicted)2435 tilepro_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
2436 					   int regop, bool netreg_p,
2437 					   bool reverse_predicted)
2438 {
2439   static char buf[64];
2440   sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
2441 	   (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2442 	   netreg_p ? 'N' : 'r', regop);
2443   return buf;
2444 }
2445 
2446 
2447 /* Output assembly code for a specific branch instruction, appending
2448    the branch prediction flag to the opcode if appropriate.  */
2449 const char *
tilepro_output_cbranch_with_opcode(rtx_insn * insn,rtx * operands,const char * opcode,const char * rev_opcode,int regop,bool netreg_p)2450 tilepro_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
2451 				    const char *opcode,
2452 				    const char *rev_opcode,
2453 				    int regop, bool netreg_p)
2454 {
2455   const char *branch_if_false;
2456   rtx taken, not_taken;
2457   bool is_simple_branch;
2458 
2459   gcc_assert (LABEL_P (operands[0]));
2460 
2461   is_simple_branch = true;
2462   if (INSN_ADDRESSES_SET_P ())
2463     {
2464       int from_addr = INSN_ADDRESSES (INSN_UID (insn));
2465       int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
2466       int delta = to_addr - from_addr;
2467       is_simple_branch = IN_RANGE (delta, -524288, 524280);
2468     }
2469 
2470   if (is_simple_branch)
2471     {
2472       /* Just a simple conditional branch.  */
2473       return
2474 	tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
2475 						   netreg_p, false);
2476     }
2477 
2478   /* Generate a reversed branch around a direct jump.  This fallback
2479      does not use branch-likely instructions.  */
2480   not_taken = gen_label_rtx ();
2481   taken = operands[0];
2482 
2483   /* Generate the reversed branch to NOT_TAKEN.  */
2484   operands[0] = not_taken;
2485   branch_if_false =
2486     tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
2487 					       netreg_p, true);
2488   output_asm_insn (branch_if_false, operands);
2489 
2490   output_asm_insn ("j\t%l0", &taken);
2491 
2492   /* Output NOT_TAKEN.  */
2493   targetm.asm_out.internal_label (asm_out_file, "L",
2494 				  CODE_LABEL_NUMBER (not_taken));
2495   return "";
2496 }
2497 
2498 
2499 /* Output assembly code for a conditional branch instruction.  */
2500 const char *
tilepro_output_cbranch(rtx_insn * insn,rtx * operands,bool reversed)2501 tilepro_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
2502 {
2503   enum rtx_code code = GET_CODE (operands[1]);
2504   const char *opcode;
2505   const char *rev_opcode;
2506 
2507   if (reversed)
2508     code = reverse_condition (code);
2509 
2510   switch (code)
2511     {
2512     case NE:
2513       opcode = "bnz";
2514       rev_opcode = "bz";
2515       break;
2516     case EQ:
2517       opcode = "bz";
2518       rev_opcode = "bnz";
2519       break;
2520     case GE:
2521       opcode = "bgez";
2522       rev_opcode = "blz";
2523       break;
2524     case GT:
2525       opcode = "bgz";
2526       rev_opcode = "blez";
2527       break;
2528     case LE:
2529       opcode = "blez";
2530       rev_opcode = "bgz";
2531       break;
2532     case LT:
2533       opcode = "blz";
2534       rev_opcode = "bgez";
2535       break;
2536     default:
2537       gcc_unreachable ();
2538     }
2539 
2540   return
2541     tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
2542 					2, false);
2543 }
2544 
2545 
2546 /* Implement the tablejump pattern.  */
2547 void
tilepro_expand_tablejump(rtx op0,rtx op1)2548 tilepro_expand_tablejump (rtx op0, rtx op1)
2549 {
2550   if (flag_pic)
2551     {
2552       rtx table = gen_rtx_LABEL_REF (Pmode, op1);
2553       rtx temp = gen_reg_rtx (Pmode);
2554       rtx text_label_symbol = tilepro_text_label_symbol ();
2555       rtx text_label_rtx = tilepro_text_label_rtx ();
2556 
2557       emit_insn (gen_addli_pcrel (temp, text_label_rtx,
2558 				  table, text_label_symbol));
2559       emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
2560       emit_move_insn (temp,
2561 		      gen_rtx_PLUS (Pmode,
2562 				    convert_to_mode (Pmode, op0, false),
2563 				    temp));
2564       op0 = temp;
2565     }
2566 
2567   emit_jump_insn (gen_tablejump_aux (op0, op1));
2568 }
2569 
2570 
2571 /* Expand a builtin vector binary op, by calling gen function GEN with
2572    operands in the proper modes.  DEST is converted to DEST_MODE, and
2573    src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE.  */
2574 void
tilepro_expand_builtin_vector_binop(rtx (* gen)(rtx,rtx,rtx),machine_mode dest_mode,rtx dest,machine_mode src_mode,rtx src0,rtx src1,bool do_src1)2575 tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2576 				     machine_mode dest_mode,
2577 				     rtx dest,
2578 				     machine_mode src_mode,
2579 				     rtx src0, rtx src1, bool do_src1)
2580 {
2581   dest = gen_lowpart (dest_mode, dest);
2582 
2583   if (src0 == const0_rtx)
2584     src0 = CONST0_RTX (src_mode);
2585   else
2586     src0 = gen_lowpart (src_mode, src0);
2587 
2588   if (do_src1)
2589     {
2590       if (src1 == const0_rtx)
2591 	src1 = CONST0_RTX (src_mode);
2592       else
2593 	src1 = gen_lowpart (src_mode, src1);
2594     }
2595 
2596   emit_insn ((*gen) (dest, src0, src1));
2597 }
2598 
2599 
2600 
2601 /* Intrinsics  */
2602 
2603 struct tile_builtin_info
2604 {
2605   enum insn_code icode;
2606   tree fndecl;
2607 };
2608 
2609 static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
2610   { CODE_FOR_addsi3,                    NULL }, /* add */
2611   { CODE_FOR_insn_addb,                 NULL }, /* addb */
2612   { CODE_FOR_insn_addbs_u,              NULL }, /* addbs_u */
2613   { CODE_FOR_insn_addh,                 NULL }, /* addh */
2614   { CODE_FOR_insn_addhs,                NULL }, /* addhs */
2615   { CODE_FOR_insn_addib,                NULL }, /* addib */
2616   { CODE_FOR_insn_addih,                NULL }, /* addih */
2617   { CODE_FOR_insn_addlis,               NULL }, /* addlis */
2618   { CODE_FOR_ssaddsi3,                  NULL }, /* adds */
2619   { CODE_FOR_insn_adiffb_u,             NULL }, /* adiffb_u */
2620   { CODE_FOR_insn_adiffh,               NULL }, /* adiffh */
2621   { CODE_FOR_andsi3,                    NULL }, /* and */
2622   { CODE_FOR_insn_auli,                 NULL }, /* auli */
2623   { CODE_FOR_insn_avgb_u,               NULL }, /* avgb_u */
2624   { CODE_FOR_insn_avgh,                 NULL }, /* avgh */
2625   { CODE_FOR_insn_bitx,                 NULL }, /* bitx */
2626   { CODE_FOR_bswapsi2,                  NULL }, /* bytex */
2627   { CODE_FOR_clzsi2,                    NULL }, /* clz */
2628   { CODE_FOR_insn_crc32_32,             NULL }, /* crc32_32 */
2629   { CODE_FOR_insn_crc32_8,              NULL }, /* crc32_8 */
2630   { CODE_FOR_ctzsi2,                    NULL }, /* ctz */
2631   { CODE_FOR_insn_drain,                NULL }, /* drain */
2632   { CODE_FOR_insn_dtlbpr,               NULL }, /* dtlbpr */
2633   { CODE_FOR_insn_dword_align,          NULL }, /* dword_align */
2634   { CODE_FOR_insn_finv,                 NULL }, /* finv */
2635   { CODE_FOR_insn_flush,                NULL }, /* flush */
2636   { CODE_FOR_insn_fnop,                 NULL }, /* fnop */
2637   { CODE_FOR_insn_icoh,                 NULL }, /* icoh */
2638   { CODE_FOR_insn_ill,                  NULL }, /* ill */
2639   { CODE_FOR_insn_info,                 NULL }, /* info */
2640   { CODE_FOR_insn_infol,                NULL }, /* infol */
2641   { CODE_FOR_insn_inthb,                NULL }, /* inthb */
2642   { CODE_FOR_insn_inthh,                NULL }, /* inthh */
2643   { CODE_FOR_insn_intlb,                NULL }, /* intlb */
2644   { CODE_FOR_insn_intlh,                NULL }, /* intlh */
2645   { CODE_FOR_insn_inv,                  NULL }, /* inv */
2646   { CODE_FOR_insn_lb,                   NULL }, /* lb */
2647   { CODE_FOR_insn_lb_u,                 NULL }, /* lb_u */
2648   { CODE_FOR_insn_lh,                   NULL }, /* lh */
2649   { CODE_FOR_insn_lh_u,                 NULL }, /* lh_u */
2650   { CODE_FOR_insn_lnk,                  NULL }, /* lnk */
2651   { CODE_FOR_insn_lw,                   NULL }, /* lw */
2652   { CODE_FOR_insn_lw_na,                NULL }, /* lw_na */
2653   { CODE_FOR_insn_lb_L2,                NULL }, /* lb_L2 */
2654   { CODE_FOR_insn_lb_u_L2,              NULL }, /* lb_u_L2 */
2655   { CODE_FOR_insn_lh_L2,                NULL }, /* lh_L2 */
2656   { CODE_FOR_insn_lh_u_L2,              NULL }, /* lh_u_L2 */
2657   { CODE_FOR_insn_lw_L2,                NULL }, /* lw_L2 */
2658   { CODE_FOR_insn_lw_na_L2,             NULL }, /* lw_na_L2 */
2659   { CODE_FOR_insn_lb_miss,              NULL }, /* lb_miss */
2660   { CODE_FOR_insn_lb_u_miss,            NULL }, /* lb_u_miss */
2661   { CODE_FOR_insn_lh_miss,              NULL }, /* lh_miss */
2662   { CODE_FOR_insn_lh_u_miss,            NULL }, /* lh_u_miss */
2663   { CODE_FOR_insn_lw_miss,              NULL }, /* lw_miss */
2664   { CODE_FOR_insn_lw_na_miss,           NULL }, /* lw_na_miss */
2665   { CODE_FOR_insn_maxb_u,               NULL }, /* maxb_u */
2666   { CODE_FOR_insn_maxh,                 NULL }, /* maxh */
2667   { CODE_FOR_insn_maxib_u,              NULL }, /* maxib_u */
2668   { CODE_FOR_insn_maxih,                NULL }, /* maxih */
2669   { CODE_FOR_memory_barrier,            NULL }, /* mf */
2670   { CODE_FOR_insn_mfspr,                NULL }, /* mfspr */
2671   { CODE_FOR_insn_minb_u,               NULL }, /* minb_u */
2672   { CODE_FOR_insn_minh,                 NULL }, /* minh */
2673   { CODE_FOR_insn_minib_u,              NULL }, /* minib_u */
2674   { CODE_FOR_insn_minih,                NULL }, /* minih */
2675   { CODE_FOR_insn_mm,                   NULL }, /* mm */
2676   { CODE_FOR_insn_mnz,                  NULL }, /* mnz */
2677   { CODE_FOR_insn_mnzb,                 NULL }, /* mnzb */
2678   { CODE_FOR_insn_mnzh,                 NULL }, /* mnzh */
2679   { CODE_FOR_movsi,                     NULL }, /* move */
2680   { CODE_FOR_insn_movelis,              NULL }, /* movelis */
2681   { CODE_FOR_insn_mtspr,                NULL }, /* mtspr */
2682   { CODE_FOR_insn_mulhh_ss,             NULL }, /* mulhh_ss */
2683   { CODE_FOR_insn_mulhh_su,             NULL }, /* mulhh_su */
2684   { CODE_FOR_insn_mulhh_uu,             NULL }, /* mulhh_uu */
2685   { CODE_FOR_insn_mulhha_ss,            NULL }, /* mulhha_ss */
2686   { CODE_FOR_insn_mulhha_su,            NULL }, /* mulhha_su */
2687   { CODE_FOR_insn_mulhha_uu,            NULL }, /* mulhha_uu */
2688   { CODE_FOR_insn_mulhhsa_uu,           NULL }, /* mulhhsa_uu */
2689   { CODE_FOR_insn_mulhl_ss,             NULL }, /* mulhl_ss */
2690   { CODE_FOR_insn_mulhl_su,             NULL }, /* mulhl_su */
2691   { CODE_FOR_insn_mulhl_us,             NULL }, /* mulhl_us */
2692   { CODE_FOR_insn_mulhl_uu,             NULL }, /* mulhl_uu */
2693   { CODE_FOR_insn_mulhla_ss,            NULL }, /* mulhla_ss */
2694   { CODE_FOR_insn_mulhla_su,            NULL }, /* mulhla_su */
2695   { CODE_FOR_insn_mulhla_us,            NULL }, /* mulhla_us */
2696   { CODE_FOR_insn_mulhla_uu,            NULL }, /* mulhla_uu */
2697   { CODE_FOR_insn_mulhlsa_uu,           NULL }, /* mulhlsa_uu */
2698   { CODE_FOR_insn_mulll_ss,             NULL }, /* mulll_ss */
2699   { CODE_FOR_insn_mulll_su,             NULL }, /* mulll_su */
2700   { CODE_FOR_insn_mulll_uu,             NULL }, /* mulll_uu */
2701   { CODE_FOR_insn_mullla_ss,            NULL }, /* mullla_ss */
2702   { CODE_FOR_insn_mullla_su,            NULL }, /* mullla_su */
2703   { CODE_FOR_insn_mullla_uu,            NULL }, /* mullla_uu */
2704   { CODE_FOR_insn_mulllsa_uu,           NULL }, /* mulllsa_uu */
2705   { CODE_FOR_insn_mvnz,                 NULL }, /* mvnz */
2706   { CODE_FOR_insn_mvz,                  NULL }, /* mvz */
2707   { CODE_FOR_insn_mz,                   NULL }, /* mz */
2708   { CODE_FOR_insn_mzb,                  NULL }, /* mzb */
2709   { CODE_FOR_insn_mzh,                  NULL }, /* mzh */
2710   { CODE_FOR_insn_nap,                  NULL }, /* nap */
2711   { CODE_FOR_nop,                       NULL }, /* nop */
2712   { CODE_FOR_insn_nor,                  NULL }, /* nor */
2713   { CODE_FOR_iorsi3,                    NULL }, /* or */
2714   { CODE_FOR_insn_packbs_u,             NULL }, /* packbs_u */
2715   { CODE_FOR_insn_packhb,               NULL }, /* packhb */
2716   { CODE_FOR_insn_packhs,               NULL }, /* packhs */
2717   { CODE_FOR_insn_packlb,               NULL }, /* packlb */
2718   { CODE_FOR_popcountsi2,               NULL }, /* pcnt */
2719   { CODE_FOR_insn_prefetch,             NULL }, /* prefetch */
2720   { CODE_FOR_insn_prefetch_L1,          NULL }, /* prefetch_L1 */
2721   { CODE_FOR_rotlsi3,                   NULL }, /* rl */
2722   { CODE_FOR_insn_s1a,                  NULL }, /* s1a */
2723   { CODE_FOR_insn_s2a,                  NULL }, /* s2a */
2724   { CODE_FOR_insn_s3a,                  NULL }, /* s3a */
2725   { CODE_FOR_insn_sadab_u,              NULL }, /* sadab_u */
2726   { CODE_FOR_insn_sadah,                NULL }, /* sadah */
2727   { CODE_FOR_insn_sadah_u,              NULL }, /* sadah_u */
2728   { CODE_FOR_insn_sadb_u,               NULL }, /* sadb_u */
2729   { CODE_FOR_insn_sadh,                 NULL }, /* sadh */
2730   { CODE_FOR_insn_sadh_u,               NULL }, /* sadh_u */
2731   { CODE_FOR_insn_sb,                   NULL }, /* sb */
2732   { CODE_FOR_insn_seq,                  NULL }, /* seq */
2733   { CODE_FOR_insn_seqb,                 NULL }, /* seqb */
2734   { CODE_FOR_insn_seqh,                 NULL }, /* seqh */
2735   { CODE_FOR_insn_seqib,                NULL }, /* seqib */
2736   { CODE_FOR_insn_seqih,                NULL }, /* seqih */
2737   { CODE_FOR_insn_sh,                   NULL }, /* sh */
2738   { CODE_FOR_ashlsi3,                   NULL }, /* shl */
2739   { CODE_FOR_insn_shlb,                 NULL }, /* shlb */
2740   { CODE_FOR_insn_shlh,                 NULL }, /* shlh */
2741   { CODE_FOR_insn_shlb,                 NULL }, /* shlib */
2742   { CODE_FOR_insn_shlh,                 NULL }, /* shlih */
2743   { CODE_FOR_lshrsi3,                   NULL }, /* shr */
2744   { CODE_FOR_insn_shrb,                 NULL }, /* shrb */
2745   { CODE_FOR_insn_shrh,                 NULL }, /* shrh */
2746   { CODE_FOR_insn_shrb,                 NULL }, /* shrib */
2747   { CODE_FOR_insn_shrh,                 NULL }, /* shrih */
2748   { CODE_FOR_insn_slt,                  NULL }, /* slt */
2749   { CODE_FOR_insn_slt_u,                NULL }, /* slt_u */
2750   { CODE_FOR_insn_sltb,                 NULL }, /* sltb */
2751   { CODE_FOR_insn_sltb_u,               NULL }, /* sltb_u */
2752   { CODE_FOR_insn_slte,                 NULL }, /* slte */
2753   { CODE_FOR_insn_slte_u,               NULL }, /* slte_u */
2754   { CODE_FOR_insn_slteb,                NULL }, /* slteb */
2755   { CODE_FOR_insn_slteb_u,              NULL }, /* slteb_u */
2756   { CODE_FOR_insn_slteh,                NULL }, /* slteh */
2757   { CODE_FOR_insn_slteh_u,              NULL }, /* slteh_u */
2758   { CODE_FOR_insn_slth,                 NULL }, /* slth */
2759   { CODE_FOR_insn_slth_u,               NULL }, /* slth_u */
2760   { CODE_FOR_insn_sltib,                NULL }, /* sltib */
2761   { CODE_FOR_insn_sltib_u,              NULL }, /* sltib_u */
2762   { CODE_FOR_insn_sltih,                NULL }, /* sltih */
2763   { CODE_FOR_insn_sltih_u,              NULL }, /* sltih_u */
2764   { CODE_FOR_insn_sne,                  NULL }, /* sne */
2765   { CODE_FOR_insn_sneb,                 NULL }, /* sneb */
2766   { CODE_FOR_insn_sneh,                 NULL }, /* sneh */
2767   { CODE_FOR_ashrsi3,                   NULL }, /* sra */
2768   { CODE_FOR_insn_srab,                 NULL }, /* srab */
2769   { CODE_FOR_insn_srah,                 NULL }, /* srah */
2770   { CODE_FOR_insn_srab,                 NULL }, /* sraib */
2771   { CODE_FOR_insn_srah,                 NULL }, /* sraih */
2772   { CODE_FOR_subsi3,                    NULL }, /* sub */
2773   { CODE_FOR_insn_subb,                 NULL }, /* subb */
2774   { CODE_FOR_insn_subbs_u,              NULL }, /* subbs_u */
2775   { CODE_FOR_insn_subh,                 NULL }, /* subh */
2776   { CODE_FOR_insn_subhs,                NULL }, /* subhs */
2777   { CODE_FOR_sssubsi3,                  NULL }, /* subs */
2778   { CODE_FOR_insn_sw,                   NULL }, /* sw */
2779   { CODE_FOR_insn_tblidxb0,             NULL }, /* tblidxb0 */
2780   { CODE_FOR_insn_tblidxb1,             NULL }, /* tblidxb1 */
2781   { CODE_FOR_insn_tblidxb2,             NULL }, /* tblidxb2 */
2782   { CODE_FOR_insn_tblidxb3,             NULL }, /* tblidxb3 */
2783   { CODE_FOR_insn_tns,                  NULL }, /* tns */
2784   { CODE_FOR_insn_wh64,                 NULL }, /* wh64 */
2785   { CODE_FOR_xorsi3,                    NULL }, /* xor */
2786   { CODE_FOR_tilepro_network_barrier,   NULL }, /* network_barrier */
2787   { CODE_FOR_tilepro_idn0_receive,      NULL }, /* idn0_receive */
2788   { CODE_FOR_tilepro_idn1_receive,      NULL }, /* idn1_receive */
2789   { CODE_FOR_tilepro_idn_send,          NULL }, /* idn_send */
2790   { CODE_FOR_tilepro_sn_receive,        NULL }, /* sn_receive */
2791   { CODE_FOR_tilepro_sn_send,           NULL }, /* sn_send */
2792   { CODE_FOR_tilepro_udn0_receive,      NULL }, /* udn0_receive */
2793   { CODE_FOR_tilepro_udn1_receive,      NULL }, /* udn1_receive */
2794   { CODE_FOR_tilepro_udn2_receive,      NULL }, /* udn2_receive */
2795   { CODE_FOR_tilepro_udn3_receive,      NULL }, /* udn3_receive */
2796   { CODE_FOR_tilepro_udn_send,          NULL }, /* udn_send */
2797 };
2798 
2799 
2800 struct tilepro_builtin_def
2801 {
2802   const char *name;
2803   enum tilepro_builtin code;
2804   bool is_const;
2805   /* The first character is the return type.  Subsequent characters
2806      are the argument types. See char_to_type.  */
2807   const char *type;
2808 };
2809 
2810 
2811 static const struct tilepro_builtin_def tilepro_builtins[] = {
2812   { "__insn_add",             TILEPRO_INSN_ADD,         true,  "lll"   },
2813   { "__insn_addb",            TILEPRO_INSN_ADDB,        true,  "lll"   },
2814   { "__insn_addbs_u",         TILEPRO_INSN_ADDBS_U,     false, "lll"   },
2815   { "__insn_addh",            TILEPRO_INSN_ADDH,        true,  "lll"   },
2816   { "__insn_addhs",           TILEPRO_INSN_ADDHS,       false, "lll"   },
2817   { "__insn_addi",            TILEPRO_INSN_ADD,         true,  "lll"   },
2818   { "__insn_addib",           TILEPRO_INSN_ADDIB,       true,  "lll"   },
2819   { "__insn_addih",           TILEPRO_INSN_ADDIH,       true,  "lll"   },
2820   { "__insn_addli",           TILEPRO_INSN_ADD,         true,  "lll"   },
2821   { "__insn_addlis",          TILEPRO_INSN_ADDLIS,      false, "lll"   },
2822   { "__insn_adds",            TILEPRO_INSN_ADDS,        false, "lll"   },
2823   { "__insn_adiffb_u",        TILEPRO_INSN_ADIFFB_U,    true,  "lll"   },
2824   { "__insn_adiffh",          TILEPRO_INSN_ADIFFH,      true,  "lll"   },
2825   { "__insn_and",             TILEPRO_INSN_AND,         true,  "lll"   },
2826   { "__insn_andi",            TILEPRO_INSN_AND,         true,  "lll"   },
2827   { "__insn_auli",            TILEPRO_INSN_AULI,        true,  "lll"   },
2828   { "__insn_avgb_u",          TILEPRO_INSN_AVGB_U,      true,  "lll"   },
2829   { "__insn_avgh",            TILEPRO_INSN_AVGH,        true,  "lll"   },
2830   { "__insn_bitx",            TILEPRO_INSN_BITX,        true,  "ll"    },
2831   { "__insn_bytex",           TILEPRO_INSN_BYTEX,       true,  "ll"    },
2832   { "__insn_clz",             TILEPRO_INSN_CLZ,         true,  "ll"    },
2833   { "__insn_crc32_32",        TILEPRO_INSN_CRC32_32,    true,  "lll"   },
2834   { "__insn_crc32_8",         TILEPRO_INSN_CRC32_8,     true,  "lll"   },
2835   { "__insn_ctz",             TILEPRO_INSN_CTZ,         true,  "ll"    },
2836   { "__insn_drain",           TILEPRO_INSN_DRAIN,       false, "v"     },
2837   { "__insn_dtlbpr",          TILEPRO_INSN_DTLBPR,      false, "vl"    },
2838   { "__insn_dword_align",     TILEPRO_INSN_DWORD_ALIGN, true,  "lllk"  },
2839   { "__insn_finv",            TILEPRO_INSN_FINV,        false, "vk"    },
2840   { "__insn_flush",           TILEPRO_INSN_FLUSH,       false, "vk"    },
2841   { "__insn_fnop",            TILEPRO_INSN_FNOP,        false, "v"     },
2842   { "__insn_icoh",            TILEPRO_INSN_ICOH,        false, "vk"    },
2843   { "__insn_ill",             TILEPRO_INSN_ILL,         false, "v"     },
2844   { "__insn_info",            TILEPRO_INSN_INFO,        false, "vl"    },
2845   { "__insn_infol",           TILEPRO_INSN_INFOL,       false, "vl"    },
2846   { "__insn_inthb",           TILEPRO_INSN_INTHB,       true,  "lll"   },
2847   { "__insn_inthh",           TILEPRO_INSN_INTHH,       true,  "lll"   },
2848   { "__insn_intlb",           TILEPRO_INSN_INTLB,       true,  "lll"   },
2849   { "__insn_intlh",           TILEPRO_INSN_INTLH,       true,  "lll"   },
2850   { "__insn_inv",             TILEPRO_INSN_INV,         false, "vp"    },
2851   { "__insn_lb",              TILEPRO_INSN_LB,          false, "lk"    },
2852   { "__insn_lb_u",            TILEPRO_INSN_LB_U,        false, "lk"    },
2853   { "__insn_lh",              TILEPRO_INSN_LH,          false, "lk"    },
2854   { "__insn_lh_u",            TILEPRO_INSN_LH_U,        false, "lk"    },
2855   { "__insn_lnk",             TILEPRO_INSN_LNK,         true,  "l"     },
2856   { "__insn_lw",              TILEPRO_INSN_LW,          false, "lk"    },
2857   { "__insn_lw_na",           TILEPRO_INSN_LW_NA,       false, "lk"    },
2858   { "__insn_lb_L2",           TILEPRO_INSN_LB_L2,       false, "lk"    },
2859   { "__insn_lb_u_L2",         TILEPRO_INSN_LB_U_L2,     false, "lk"    },
2860   { "__insn_lh_L2",           TILEPRO_INSN_LH_L2,       false, "lk"    },
2861   { "__insn_lh_u_L2",         TILEPRO_INSN_LH_U_L2,     false, "lk"    },
2862   { "__insn_lw_L2",           TILEPRO_INSN_LW_L2,       false, "lk"    },
2863   { "__insn_lw_na_L2",        TILEPRO_INSN_LW_NA_L2,    false, "lk"    },
2864   { "__insn_lb_miss",         TILEPRO_INSN_LB_MISS,     false, "lk"    },
2865   { "__insn_lb_u_miss",       TILEPRO_INSN_LB_U_MISS,   false, "lk"    },
2866   { "__insn_lh_miss",         TILEPRO_INSN_LH_MISS,     false, "lk"    },
2867   { "__insn_lh_u_miss",       TILEPRO_INSN_LH_U_MISS,   false, "lk"    },
2868   { "__insn_lw_miss",         TILEPRO_INSN_LW_MISS,     false, "lk"    },
2869   { "__insn_lw_na_miss",      TILEPRO_INSN_LW_NA_MISS,  false, "lk"    },
2870   { "__insn_maxb_u",          TILEPRO_INSN_MAXB_U,      true,  "lll"   },
2871   { "__insn_maxh",            TILEPRO_INSN_MAXH,        true,  "lll"   },
2872   { "__insn_maxib_u",         TILEPRO_INSN_MAXIB_U,     true,  "lll"   },
2873   { "__insn_maxih",           TILEPRO_INSN_MAXIH,       true,  "lll"   },
2874   { "__insn_mf",              TILEPRO_INSN_MF,          false, "v"     },
2875   { "__insn_mfspr",           TILEPRO_INSN_MFSPR,       false, "ll"    },
2876   { "__insn_minb_u",          TILEPRO_INSN_MINB_U,      true,  "lll"   },
2877   { "__insn_minh",            TILEPRO_INSN_MINH,        true,  "lll"   },
2878   { "__insn_minib_u",         TILEPRO_INSN_MINIB_U,     true,  "lll"   },
2879   { "__insn_minih",           TILEPRO_INSN_MINIH,       true,  "lll"   },
2880   { "__insn_mm",              TILEPRO_INSN_MM,          true,  "lllll" },
2881   { "__insn_mnz",             TILEPRO_INSN_MNZ,         true,  "lll"   },
2882   { "__insn_mnzb",            TILEPRO_INSN_MNZB,        true,  "lll"   },
2883   { "__insn_mnzh",            TILEPRO_INSN_MNZH,        true,  "lll"   },
2884   { "__insn_move",            TILEPRO_INSN_MOVE,        true,  "ll"    },
2885   { "__insn_movei",           TILEPRO_INSN_MOVE,        true,  "ll"    },
2886   { "__insn_moveli",          TILEPRO_INSN_MOVE,        true,  "ll"    },
2887   { "__insn_movelis",         TILEPRO_INSN_MOVELIS,     false, "ll"    },
2888   { "__insn_mtspr",           TILEPRO_INSN_MTSPR,       false, "vll"   },
2889   { "__insn_mulhh_ss",        TILEPRO_INSN_MULHH_SS,    true,  "lll"   },
2890   { "__insn_mulhh_su",        TILEPRO_INSN_MULHH_SU,    true,  "lll"   },
2891   { "__insn_mulhh_uu",        TILEPRO_INSN_MULHH_UU,    true,  "lll"   },
2892   { "__insn_mulhha_ss",       TILEPRO_INSN_MULHHA_SS,   true,  "llll"  },
2893   { "__insn_mulhha_su",       TILEPRO_INSN_MULHHA_SU,   true,  "llll"  },
2894   { "__insn_mulhha_uu",       TILEPRO_INSN_MULHHA_UU,   true,  "llll"  },
2895   { "__insn_mulhhsa_uu",      TILEPRO_INSN_MULHHSA_UU,  true,  "llll"  },
2896   { "__insn_mulhl_ss",        TILEPRO_INSN_MULHL_SS,    true,  "lll"   },
2897   { "__insn_mulhl_su",        TILEPRO_INSN_MULHL_SU,    true,  "lll"   },
2898   { "__insn_mulhl_us",        TILEPRO_INSN_MULHL_US,    true,  "lll"   },
2899   { "__insn_mulhl_uu",        TILEPRO_INSN_MULHL_UU,    true,  "lll"   },
2900   { "__insn_mulhla_ss",       TILEPRO_INSN_MULHLA_SS,   true,  "llll"  },
2901   { "__insn_mulhla_su",       TILEPRO_INSN_MULHLA_SU,   true,  "llll"  },
2902   { "__insn_mulhla_us",       TILEPRO_INSN_MULHLA_US,   true,  "llll"  },
2903   { "__insn_mulhla_uu",       TILEPRO_INSN_MULHLA_UU,   true,  "llll"  },
2904   { "__insn_mulhlsa_uu",      TILEPRO_INSN_MULHLSA_UU,  true,  "llll"  },
2905   { "__insn_mulll_ss",        TILEPRO_INSN_MULLL_SS,    true,  "lll"   },
2906   { "__insn_mulll_su",        TILEPRO_INSN_MULLL_SU,    true,  "lll"   },
2907   { "__insn_mulll_uu",        TILEPRO_INSN_MULLL_UU,    true,  "lll"   },
2908   { "__insn_mullla_ss",       TILEPRO_INSN_MULLLA_SS,   true,  "llll"  },
2909   { "__insn_mullla_su",       TILEPRO_INSN_MULLLA_SU,   true,  "llll"  },
2910   { "__insn_mullla_uu",       TILEPRO_INSN_MULLLA_UU,   true,  "llll"  },
2911   { "__insn_mulllsa_uu",      TILEPRO_INSN_MULLLSA_UU,  true,  "llll"  },
2912   { "__insn_mvnz",            TILEPRO_INSN_MVNZ,        true,  "llll"  },
2913   { "__insn_mvz",             TILEPRO_INSN_MVZ,         true,  "llll"  },
2914   { "__insn_mz",              TILEPRO_INSN_MZ,          true,  "lll"   },
2915   { "__insn_mzb",             TILEPRO_INSN_MZB,         true,  "lll"   },
2916   { "__insn_mzh",             TILEPRO_INSN_MZH,         true,  "lll"   },
2917   { "__insn_nap",             TILEPRO_INSN_NAP,         false, "v"     },
2918   { "__insn_nop",             TILEPRO_INSN_NOP,         true,  "v"     },
2919   { "__insn_nor",             TILEPRO_INSN_NOR,         true,  "lll"   },
2920   { "__insn_or",              TILEPRO_INSN_OR,          true,  "lll"   },
2921   { "__insn_ori",             TILEPRO_INSN_OR,          true,  "lll"   },
2922   { "__insn_packbs_u",        TILEPRO_INSN_PACKBS_U,    false, "lll"   },
2923   { "__insn_packhb",          TILEPRO_INSN_PACKHB,      true,  "lll"   },
2924   { "__insn_packhs",          TILEPRO_INSN_PACKHS,      false, "lll"   },
2925   { "__insn_packlb",          TILEPRO_INSN_PACKLB,      true,  "lll"   },
2926   { "__insn_pcnt",            TILEPRO_INSN_PCNT,        true,  "ll"    },
2927   { "__insn_prefetch",        TILEPRO_INSN_PREFETCH,    false, "vk"    },
2928   { "__insn_prefetch_L1",     TILEPRO_INSN_PREFETCH_L1, false, "vk"    },
2929   { "__insn_rl",              TILEPRO_INSN_RL,          true,  "lll"   },
2930   { "__insn_rli",             TILEPRO_INSN_RL,          true,  "lll"   },
2931   { "__insn_s1a",             TILEPRO_INSN_S1A,         true,  "lll"   },
2932   { "__insn_s2a",             TILEPRO_INSN_S2A,         true,  "lll"   },
2933   { "__insn_s3a",             TILEPRO_INSN_S3A,         true,  "lll"   },
2934   { "__insn_sadab_u",         TILEPRO_INSN_SADAB_U,     true,  "llll"  },
2935   { "__insn_sadah",           TILEPRO_INSN_SADAH,       true,  "llll"  },
2936   { "__insn_sadah_u",         TILEPRO_INSN_SADAH_U,     true,  "llll"  },
2937   { "__insn_sadb_u",          TILEPRO_INSN_SADB_U,      true,  "lll"   },
2938   { "__insn_sadh",            TILEPRO_INSN_SADH,        true,  "lll"   },
2939   { "__insn_sadh_u",          TILEPRO_INSN_SADH_U,      true,  "lll"   },
2940   { "__insn_sb",              TILEPRO_INSN_SB,          false, "vpl"   },
2941   { "__insn_seq",             TILEPRO_INSN_SEQ,         true,  "lll"   },
2942   { "__insn_seqb",            TILEPRO_INSN_SEQB,        true,  "lll"   },
2943   { "__insn_seqh",            TILEPRO_INSN_SEQH,        true,  "lll"   },
2944   { "__insn_seqi",            TILEPRO_INSN_SEQ,         true,  "lll"   },
2945   { "__insn_seqib",           TILEPRO_INSN_SEQIB,       true,  "lll"   },
2946   { "__insn_seqih",           TILEPRO_INSN_SEQIH,       true,  "lll"   },
2947   { "__insn_sh",              TILEPRO_INSN_SH,          false, "vpl"   },
2948   { "__insn_shl",             TILEPRO_INSN_SHL,         true,  "lll"   },
2949   { "__insn_shlb",            TILEPRO_INSN_SHLB,        true,  "lll"   },
2950   { "__insn_shlh",            TILEPRO_INSN_SHLH,        true,  "lll"   },
2951   { "__insn_shli",            TILEPRO_INSN_SHL,         true,  "lll"   },
2952   { "__insn_shlib",           TILEPRO_INSN_SHLIB,       true,  "lll"   },
2953   { "__insn_shlih",           TILEPRO_INSN_SHLIH,       true,  "lll"   },
2954   { "__insn_shr",             TILEPRO_INSN_SHR,         true,  "lll"   },
2955   { "__insn_shrb",            TILEPRO_INSN_SHRB,        true,  "lll"   },
2956   { "__insn_shrh",            TILEPRO_INSN_SHRH,        true,  "lll"   },
2957   { "__insn_shri",            TILEPRO_INSN_SHR,         true,  "lll"   },
2958   { "__insn_shrib",           TILEPRO_INSN_SHRIB,       true,  "lll"   },
2959   { "__insn_shrih",           TILEPRO_INSN_SHRIH,       true,  "lll"   },
2960   { "__insn_slt",             TILEPRO_INSN_SLT,         true,  "lll"   },
2961   { "__insn_slt_u",           TILEPRO_INSN_SLT_U,       true,  "lll"   },
2962   { "__insn_sltb",            TILEPRO_INSN_SLTB,        true,  "lll"   },
2963   { "__insn_sltb_u",          TILEPRO_INSN_SLTB_U,      true,  "lll"   },
2964   { "__insn_slte",            TILEPRO_INSN_SLTE,        true,  "lll"   },
2965   { "__insn_slte_u",          TILEPRO_INSN_SLTE_U,      true,  "lll"   },
2966   { "__insn_slteb",           TILEPRO_INSN_SLTEB,       true,  "lll"   },
2967   { "__insn_slteb_u",         TILEPRO_INSN_SLTEB_U,     true,  "lll"   },
2968   { "__insn_slteh",           TILEPRO_INSN_SLTEH,       true,  "lll"   },
2969   { "__insn_slteh_u",         TILEPRO_INSN_SLTEH_U,     true,  "lll"   },
2970   { "__insn_slth",            TILEPRO_INSN_SLTH,        true,  "lll"   },
2971   { "__insn_slth_u",          TILEPRO_INSN_SLTH_U,      true,  "lll"   },
2972   { "__insn_slti",            TILEPRO_INSN_SLT,         true,  "lll"   },
2973   { "__insn_slti_u",          TILEPRO_INSN_SLT_U,       true,  "lll"   },
2974   { "__insn_sltib",           TILEPRO_INSN_SLTIB,       true,  "lll"   },
2975   { "__insn_sltib_u",         TILEPRO_INSN_SLTIB_U,     true,  "lll"   },
2976   { "__insn_sltih",           TILEPRO_INSN_SLTIH,       true,  "lll"   },
2977   { "__insn_sltih_u",         TILEPRO_INSN_SLTIH_U,     true,  "lll"   },
2978   { "__insn_sne",             TILEPRO_INSN_SNE,         true,  "lll"   },
2979   { "__insn_sneb",            TILEPRO_INSN_SNEB,        true,  "lll"   },
2980   { "__insn_sneh",            TILEPRO_INSN_SNEH,        true,  "lll"   },
2981   { "__insn_sra",             TILEPRO_INSN_SRA,         true,  "lll"   },
2982   { "__insn_srab",            TILEPRO_INSN_SRAB,        true,  "lll"   },
2983   { "__insn_srah",            TILEPRO_INSN_SRAH,        true,  "lll"   },
2984   { "__insn_srai",            TILEPRO_INSN_SRA,         true,  "lll"   },
2985   { "__insn_sraib",           TILEPRO_INSN_SRAIB,       true,  "lll"   },
2986   { "__insn_sraih",           TILEPRO_INSN_SRAIH,       true,  "lll"   },
2987   { "__insn_sub",             TILEPRO_INSN_SUB,         true,  "lll"   },
2988   { "__insn_subb",            TILEPRO_INSN_SUBB,        true,  "lll"   },
2989   { "__insn_subbs_u",         TILEPRO_INSN_SUBBS_U,     false, "lll"   },
2990   { "__insn_subh",            TILEPRO_INSN_SUBH,        true,  "lll"   },
2991   { "__insn_subhs",           TILEPRO_INSN_SUBHS,       false, "lll"   },
2992   { "__insn_subs",            TILEPRO_INSN_SUBS,        false, "lll"   },
2993   { "__insn_sw",              TILEPRO_INSN_SW,          false, "vpl"   },
2994   { "__insn_tblidxb0",        TILEPRO_INSN_TBLIDXB0,    true,  "lll"   },
2995   { "__insn_tblidxb1",        TILEPRO_INSN_TBLIDXB1,    true,  "lll"   },
2996   { "__insn_tblidxb2",        TILEPRO_INSN_TBLIDXB2,    true,  "lll"   },
2997   { "__insn_tblidxb3",        TILEPRO_INSN_TBLIDXB3,    true,  "lll"   },
2998   { "__insn_tns",             TILEPRO_INSN_TNS,         false, "lp"    },
2999   { "__insn_wh64",            TILEPRO_INSN_WH64,        false, "vp"    },
3000   { "__insn_xor",             TILEPRO_INSN_XOR,         true,  "lll"   },
3001   { "__insn_xori",            TILEPRO_INSN_XOR,         true,  "lll"   },
3002   { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER,  false, "v"     },
3003   { "__tile_idn0_receive",    TILEPRO_IDN0_RECEIVE,     false, "l"     },
3004   { "__tile_idn1_receive",    TILEPRO_IDN1_RECEIVE,     false, "l"     },
3005   { "__tile_idn_send",        TILEPRO_IDN_SEND,         false, "vl"    },
3006   { "__tile_sn_receive",      TILEPRO_SN_RECEIVE,       false, "l"     },
3007   { "__tile_sn_send",         TILEPRO_SN_SEND,          false, "vl"    },
3008   { "__tile_udn0_receive",    TILEPRO_UDN0_RECEIVE,     false, "l"     },
3009   { "__tile_udn1_receive",    TILEPRO_UDN1_RECEIVE,     false, "l"     },
3010   { "__tile_udn2_receive",    TILEPRO_UDN2_RECEIVE,     false, "l"     },
3011   { "__tile_udn3_receive",    TILEPRO_UDN3_RECEIVE,     false, "l"     },
3012   { "__tile_udn_send",        TILEPRO_UDN_SEND,         false, "vl"    },
3013 };
3014 
3015 
3016 /* Convert a character in a builtin type string to a tree type.  */
3017 static tree
char_to_type(char c)3018 char_to_type (char c)
3019 {
3020   static tree volatile_ptr_type_node = NULL;
3021   static tree volatile_const_ptr_type_node = NULL;
3022 
3023   if (volatile_ptr_type_node == NULL)
3024     {
3025       volatile_ptr_type_node =
3026 	build_pointer_type (build_qualified_type (void_type_node,
3027 						  TYPE_QUAL_VOLATILE));
3028       volatile_const_ptr_type_node =
3029 	build_pointer_type (build_qualified_type (void_type_node,
3030 						  TYPE_QUAL_CONST
3031 						  | TYPE_QUAL_VOLATILE));
3032     }
3033 
3034   switch (c)
3035     {
3036     case 'v':
3037       return void_type_node;
3038     case 'l':
3039       return long_unsigned_type_node;
3040     case 'p':
3041       return volatile_ptr_type_node;
3042     case 'k':
3043       return volatile_const_ptr_type_node;
3044     default:
3045       gcc_unreachable ();
3046     }
3047 }
3048 
3049 
3050 /* Implement TARGET_INIT_BUILTINS.  */
3051 static void
tilepro_init_builtins(void)3052 tilepro_init_builtins (void)
3053 {
3054   size_t i;
3055 
3056   for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
3057     {
3058       const struct tilepro_builtin_def *p = &tilepro_builtins[i];
3059       tree ftype, ret_type, arg_type_list = void_list_node;
3060       tree decl;
3061       int j;
3062 
3063       for (j = strlen (p->type) - 1; j > 0; j--)
3064 	{
3065 	  arg_type_list =
3066 	    tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
3067 	}
3068 
3069       ret_type = char_to_type (p->type[0]);
3070 
3071       ftype = build_function_type (ret_type, arg_type_list);
3072 
3073       decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
3074 				   NULL, NULL);
3075 
3076       if (p->is_const)
3077 	TREE_READONLY (decl) = 1;
3078       TREE_NOTHROW (decl) = 1;
3079 
3080       if (tilepro_builtin_info[p->code].fndecl == NULL)
3081 	tilepro_builtin_info[p->code].fndecl = decl;
3082     }
3083 }
3084 
3085 
3086 /* Implement TARGET_EXPAND_BUILTIN.  */
3087 static rtx
tilepro_expand_builtin(tree exp,rtx target,rtx subtarget ATTRIBUTE_UNUSED,machine_mode mode ATTRIBUTE_UNUSED,int ignore ATTRIBUTE_UNUSED)3088 tilepro_expand_builtin (tree exp,
3089 			rtx target,
3090 			rtx subtarget ATTRIBUTE_UNUSED,
3091 			machine_mode mode ATTRIBUTE_UNUSED,
3092 			int ignore ATTRIBUTE_UNUSED)
3093 {
3094 #define MAX_BUILTIN_ARGS 4
3095 
3096   tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3097   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3098   tree arg;
3099   call_expr_arg_iterator iter;
3100   enum insn_code icode;
3101   rtx op[MAX_BUILTIN_ARGS + 1], pat;
3102   int opnum;
3103   bool nonvoid;
3104   insn_gen_fn fn;
3105 
3106   if (fcode >= TILEPRO_BUILTIN_max)
3107     internal_error ("bad builtin fcode");
3108   icode = tilepro_builtin_info[fcode].icode;
3109   if (icode == 0)
3110     internal_error ("bad builtin icode");
3111 
3112   nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
3113 
3114   opnum = nonvoid;
3115   FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3116   {
3117     const struct insn_operand_data *insn_op;
3118 
3119     if (arg == error_mark_node)
3120       return NULL_RTX;
3121     if (opnum > MAX_BUILTIN_ARGS)
3122       return NULL_RTX;
3123 
3124     insn_op = &insn_data[icode].operand[opnum];
3125 
3126     op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);
3127 
3128     if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3129       op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);
3130 
3131     if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3132       {
3133 	/* We still failed to meet the predicate even after moving
3134 	   into a register. Assume we needed an immediate.  */
3135 	error_at (EXPR_LOCATION (exp),
3136 		  "operand must be an immediate of the right size");
3137 	return const0_rtx;
3138       }
3139 
3140     opnum++;
3141   }
3142 
3143   if (nonvoid)
3144     {
3145       machine_mode tmode = insn_data[icode].operand[0].mode;
3146       if (!target
3147 	  || GET_MODE (target) != tmode
3148 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3149 	target = gen_reg_rtx (tmode);
3150       op[0] = target;
3151     }
3152 
3153   fn = GEN_FCN (icode);
3154   switch (opnum)
3155     {
3156     case 0:
3157       pat = fn (NULL_RTX);
3158       break;
3159     case 1:
3160       pat = fn (op[0]);
3161       break;
3162     case 2:
3163       pat = fn (op[0], op[1]);
3164       break;
3165     case 3:
3166       pat = fn (op[0], op[1], op[2]);
3167       break;
3168     case 4:
3169       pat = fn (op[0], op[1], op[2], op[3]);
3170       break;
3171     case 5:
3172       pat = fn (op[0], op[1], op[2], op[3], op[4]);
3173       break;
3174     default:
3175       gcc_unreachable ();
3176     }
3177   if (!pat)
3178     return NULL_RTX;
3179 
3180   /* If we are generating a prefetch, tell the scheduler not to move
3181      it around.  */
3182   if (GET_CODE (pat) == PREFETCH)
3183     PREFETCH_SCHEDULE_BARRIER_P (pat) = true;
3184 
3185   emit_insn (pat);
3186 
3187   if (nonvoid)
3188     return target;
3189   else
3190     return const0_rtx;
3191 }
3192 
3193 
3194 /* Implement TARGET_BUILTIN_DECL.  */
3195 static tree
tilepro_builtin_decl(unsigned code,bool initialize_p ATTRIBUTE_UNUSED)3196 tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3197 {
3198   if (code >= TILEPRO_BUILTIN_max)
3199     return error_mark_node;
3200 
3201   return tilepro_builtin_info[code].fndecl;
3202 }
3203 
3204 
3205 
3206 /* Stack frames  */
3207 
3208 /* Return whether REGNO needs to be saved in the stack frame.  */
3209 static bool
need_to_save_reg(unsigned int regno)3210 need_to_save_reg (unsigned int regno)
3211 {
3212   if (!fixed_regs[regno] && !call_used_regs[regno]
3213       && df_regs_ever_live_p (regno))
3214     return true;
3215 
3216   if (flag_pic
3217       && (regno == PIC_OFFSET_TABLE_REGNUM
3218 	  || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
3219       && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
3220     return true;
3221 
3222   if (crtl->calls_eh_return)
3223     {
3224       unsigned i;
3225       for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
3226 	{
3227 	  if (regno == EH_RETURN_DATA_REGNO (i))
3228 	    return true;
3229 	}
3230     }
3231 
3232   return false;
3233 }
3234 
3235 
3236 /* Return the size of the register savev area.  This function is only
3237    correct starting with local register allocation */
3238 static int
tilepro_saved_regs_size(void)3239 tilepro_saved_regs_size (void)
3240 {
3241   int reg_save_size = 0;
3242   int regno;
3243   int offset_to_frame;
3244   int align_mask;
3245 
3246   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
3247     if (need_to_save_reg (regno))
3248       reg_save_size += UNITS_PER_WORD;
3249 
3250   /* Pad out the register save area if necessary to make
3251      frame_pointer_rtx be as aligned as the stack pointer.  */
3252   offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
3253   align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
3254   reg_save_size += (-offset_to_frame) & align_mask;
3255 
3256   return reg_save_size;
3257 }
3258 
3259 
3260 /* Round up frame size SIZE.  */
3261 static int
round_frame_size(int size)3262 round_frame_size (int size)
3263 {
3264   return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
3265 	  & -STACK_BOUNDARY / BITS_PER_UNIT);
3266 }
3267 
3268 
3269 /* Emit a store in the stack frame to save REGNO at address ADDR, and
3270    emit the corresponding REG_CFA_OFFSET note described by CFA and
3271    CFA_OFFSET.  Return the emitted insn.  */
3272 static rtx
frame_emit_store(int regno,int regno_note,rtx addr,rtx cfa,int cfa_offset)3273 frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
3274 		  int cfa_offset)
3275 {
3276   rtx reg = gen_rtx_REG (Pmode, regno);
3277   rtx mem = gen_frame_mem (Pmode, addr);
3278   rtx mov = gen_movsi (mem, reg);
3279 
3280   /* Describe what just happened in a way that dwarf understands.  We
3281      use temporary registers to hold the address to make scheduling
3282      easier, and use the REG_CFA_OFFSET to describe the address as an
3283      offset from the CFA.  */
3284   rtx reg_note = gen_rtx_REG (Pmode, regno_note);
3285   rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
3286   rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
3287   rtx real = gen_rtx_SET (cfa_relative_mem, reg_note);
3288   add_reg_note (mov, REG_CFA_OFFSET, real);
3289 
3290   return emit_insn (mov);
3291 }
3292 
3293 
3294 /* Emit a load in the stack frame to load REGNO from address ADDR.
3295    Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3296    non-null.  Return the emitted insn.  */
3297 static rtx_insn *
frame_emit_load(int regno,rtx addr,rtx * cfa_restores)3298 frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
3299 {
3300   rtx reg = gen_rtx_REG (Pmode, regno);
3301   rtx mem = gen_frame_mem (Pmode, addr);
3302   if (cfa_restores)
3303     *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
3304   return emit_insn (gen_movsi (reg, mem));
3305 }
3306 
3307 
3308 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3309    including sequences.  */
3310 static rtx_insn *
set_frame_related_p(void)3311 set_frame_related_p (void)
3312 {
3313   rtx_insn *seq = get_insns ();
3314   rtx_insn *insn;
3315 
3316   end_sequence ();
3317 
3318   if (!seq)
3319     return NULL;
3320 
3321   if (INSN_P (seq))
3322     {
3323       insn = seq;
3324       while (insn != NULL_RTX)
3325 	{
3326 	  RTX_FRAME_RELATED_P (insn) = 1;
3327 	  insn = NEXT_INSN (insn);
3328 	}
3329       seq = emit_insn (seq);
3330     }
3331   else
3332     {
3333       seq = emit_insn (seq);
3334       RTX_FRAME_RELATED_P (seq) = 1;
3335     }
3336   return seq;
3337 }
3338 
3339 
3340 #define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
3341 
3342 /* This emits code for 'sp += offset'.
3343 
3344    The ABI only allows us to modify 'sp' in a single 'addi' or
3345    'addli', so the backtracer understands it. Larger amounts cannot
3346    use those instructions, so are added by placing the offset into a
3347    large register and using 'add'.
3348 
3349    This happens after reload, so we need to expand it ourselves.  */
3350 static rtx_insn *
emit_sp_adjust(int offset,int * next_scratch_regno,bool frame_related,rtx reg_notes)3351 emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
3352 		rtx reg_notes)
3353 {
3354   rtx to_add;
3355   rtx imm_rtx = gen_int_si (offset);
3356 
3357   rtx_insn *insn;
3358   if (satisfies_constraint_J (imm_rtx))
3359     {
3360       /* We can add this using a single addi or addli.  */
3361       to_add = imm_rtx;
3362     }
3363   else
3364     {
3365       rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3366       tilepro_expand_set_const32 (tmp, imm_rtx);
3367       to_add = tmp;
3368     }
3369 
3370   /* Actually adjust the stack pointer.  */
3371   insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
3372 				   to_add));
3373   REG_NOTES (insn) = reg_notes;
3374 
3375   /* Describe what just happened in a way that dwarf understands.  */
3376   if (frame_related)
3377     {
3378       rtx real = gen_rtx_SET (stack_pointer_rtx,
3379 			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3380 					    imm_rtx));
3381       RTX_FRAME_RELATED_P (insn) = 1;
3382       add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
3383     }
3384 
3385   return insn;
3386 }
3387 
3388 
3389 /* Return whether the current function is leaf.  This takes into
3390    account whether the function calls tls_get_addr.  */
3391 static bool
tilepro_current_function_is_leaf(void)3392 tilepro_current_function_is_leaf (void)
3393 {
3394   return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
3395 }
3396 
3397 
3398 /* Return the frame size.  */
3399 static int
compute_total_frame_size(void)3400 compute_total_frame_size (void)
3401 {
3402   int total_size = (get_frame_size () + tilepro_saved_regs_size ()
3403 		    + crtl->outgoing_args_size
3404 		    + crtl->args.pretend_args_size);
3405 
3406   if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
3407     {
3408       /* Make room for save area in callee.  */
3409       total_size += STACK_POINTER_OFFSET;
3410     }
3411 
3412   return round_frame_size (total_size);
3413 }
3414 
3415 
3416 /* Return nonzero if this function is known to have a null epilogue.
3417    This allows the optimizer to omit jumps to jumps if no stack was
3418    created.  */
3419 bool
tilepro_can_use_return_insn_p(void)3420 tilepro_can_use_return_insn_p (void)
3421 {
3422   return (reload_completed
3423 	  && cfun->static_chain_decl == 0
3424 	  && compute_total_frame_size () == 0
3425 	  && tilepro_current_function_is_leaf ()
3426 	  && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
3427 }
3428 
3429 
3430 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
3431    is a frame pointer, it computes the value relative to
3432    that. Otherwise it uses the stack pointer.  */
3433 static rtx
compute_frame_addr(int offset_from_fp,int * next_scratch_regno)3434 compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
3435 {
3436   rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
3437   int offset_from_base;
3438 
3439   if (frame_pointer_needed)
3440     {
3441       base_reg_rtx = hard_frame_pointer_rtx;
3442       offset_from_base = offset_from_fp;
3443     }
3444   else
3445     {
3446       int offset_from_sp = compute_total_frame_size () + offset_from_fp;
3447       base_reg_rtx = stack_pointer_rtx;
3448       offset_from_base = offset_from_sp;
3449     }
3450 
3451   if (offset_from_base == 0)
3452     return base_reg_rtx;
3453 
3454   /* Compute the new value of the stack pointer.  */
3455   tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3456   offset_rtx = gen_int_si (offset_from_base);
3457 
3458   if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
3459     {
3460       emit_insn (gen_rtx_SET (tmp_reg_rtx,
3461 			      gen_rtx_PLUS (Pmode, base_reg_rtx,
3462 					    offset_rtx)));
3463     }
3464 
3465   return tmp_reg_rtx;
3466 }
3467 
3468 
3469 /* The stack frame looks like this:
3470          +-------------+
3471          |    ...      |
3472          |  incoming   |
3473          | stack args  |
3474    AP -> +-------------+
3475          | caller's HFP|
3476          +-------------+
3477          | lr save     |
3478   HFP -> +-------------+
3479          |  var args   |
3480          |  reg save   | crtl->args.pretend_args_size bytes
3481          +-------------+
3482          |    ...      |
3483          | saved regs  | tilepro_saved_regs_size() bytes
3484    FP -> +-------------+
3485          |    ...      |
3486          |   vars      | get_frame_size() bytes
3487          +-------------+
3488          |    ...      |
3489          |  outgoing   |
3490          |  stack args | crtl->outgoing_args_size bytes
3491          +-------------+
3492          | HFP         | 4 bytes (only here if nonleaf / alloca)
3493          +-------------+
3494          | callee lr   | 4 bytes (only here if nonleaf / alloca)
3495          | save        |
3496    SP -> +-------------+
3497 
3498   HFP == incoming SP.
3499 
3500   For functions with a frame larger than 32767 bytes, or which use
3501   alloca (), r52 is used as a frame pointer.  Otherwise there is no
3502   frame pointer.
3503 
3504   FP is saved at SP+4 before calling a subroutine so the
3505   callee can chain.  */
3506 void
tilepro_expand_prologue(void)3507 tilepro_expand_prologue (void)
3508 {
3509 #define ROUND_ROBIN_SIZE 4
3510   /* We round-robin through four scratch registers to hold temporary
3511      addresses for saving registers, to make instruction scheduling
3512      easier.  */
3513   rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
3514     NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
3515   };
3516   rtx insn, cfa;
3517   unsigned int which_scratch;
3518   int offset, start_offset, regno;
3519 
3520   /* A register that holds a copy of the incoming fp.  */
3521   int fp_copy_regno = -1;
3522 
3523   /* A register that holds a copy of the incoming sp.  */
3524   int sp_copy_regno = -1;
3525 
3526   /* Next scratch register number to hand out (postdecrementing).  */
3527   int next_scratch_regno = 29;
3528 
3529   int total_size = compute_total_frame_size ();
3530 
3531   if (flag_stack_usage_info)
3532     current_function_static_stack_size = total_size;
3533 
3534   /* Save lr first in its special location because code after this
3535      might use the link register as a scratch register.  */
3536   if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
3537     {
3538       FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
3539 			     stack_pointer_rtx, stack_pointer_rtx, 0));
3540       emit_insn (gen_blockage ());
3541     }
3542 
3543   if (total_size == 0)
3544     {
3545       /* Load the PIC register if needed.  */
3546       if (flag_pic && crtl->uses_pic_offset_table)
3547 	load_pic_register (false);
3548 
3549       return;
3550     }
3551 
3552   cfa = stack_pointer_rtx;
3553 
3554   if (frame_pointer_needed)
3555     {
3556       fp_copy_regno = next_scratch_regno--;
3557 
3558       /* Copy the old frame pointer aside so we can save it later.  */
3559       insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
3560 				  hard_frame_pointer_rtx));
3561       add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);
3562 
3563       /* Set up the frame pointer.  */
3564       insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
3565       add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
3566       cfa = hard_frame_pointer_rtx;
3567       REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
3568 
3569       /* fp holds a copy of the incoming sp, in case we need to store
3570          it.  */
3571       sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
3572     }
3573   else if (!tilepro_current_function_is_leaf ())
3574     {
3575       /* Copy the old stack pointer aside so we can save it later.  */
3576       sp_copy_regno = next_scratch_regno--;
3577       emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
3578 		      stack_pointer_rtx);
3579     }
3580 
3581   if (tilepro_current_function_is_leaf ())
3582     {
3583       /* No need to store chain pointer to caller's frame.  */
3584       emit_sp_adjust (-total_size, &next_scratch_regno,
3585 		      !frame_pointer_needed, NULL_RTX);
3586     }
3587   else
3588     {
3589       /* Save the frame pointer (incoming sp value) to support
3590          backtracing.  First we need to create an rtx with the store
3591          address.  */
3592       rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
3593       rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));
3594 
3595       if (add_operand (size_rtx, Pmode))
3596 	{
3597 	  /* Expose more parallelism by computing this value from the
3598 	     original stack pointer, not the one after we have pushed
3599 	     the frame.  */
3600 	  rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
3601 	  emit_insn (gen_rtx_SET (chain_addr, p));
3602 	  emit_sp_adjust (-total_size, &next_scratch_regno,
3603 			  !frame_pointer_needed, NULL_RTX);
3604 	}
3605       else
3606 	{
3607 	  /* The stack frame is large, so just store the incoming sp
3608 	     value at *(new_sp + UNITS_PER_WORD).  */
3609 	  rtx p;
3610 	  emit_sp_adjust (-total_size, &next_scratch_regno,
3611 			  !frame_pointer_needed, NULL_RTX);
3612 	  p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3613 			    GEN_INT (UNITS_PER_WORD));
3614 	  emit_insn (gen_rtx_SET (chain_addr, p));
3615 	}
3616 
3617       /* Save our frame pointer for backtrace chaining.  */
3618       emit_insn (gen_movsi (gen_frame_mem (SImode, chain_addr),
3619 			    gen_rtx_REG (SImode, sp_copy_regno)));
3620     }
3621 
3622   /* Compute where to start storing registers we need to save.  */
3623   start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
3624   offset = start_offset;
3625 
3626   /* Store all registers that need saving.  */
3627   which_scratch = 0;
3628   for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
3629     if (need_to_save_reg (regno))
3630       {
3631 	rtx r = reg_save_addr[which_scratch];
3632 	int from_regno;
3633 	int cfa_offset = frame_pointer_needed ? offset : total_size + offset;
3634 
3635 	if (r == NULL_RTX)
3636 	  {
3637 	    rtx p = compute_frame_addr (offset, &next_scratch_regno);
3638 	    r = gen_rtx_REG (word_mode, next_scratch_regno--);
3639 	    reg_save_addr[which_scratch] = r;
3640 
3641 	    emit_insn (gen_rtx_SET (r, p));
3642 	  }
3643 	else
3644 	  {
3645 	    /* Advance to the next stack slot to store this register.  */
3646 	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
3647 	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
3648 	    emit_insn (gen_rtx_SET (r, p));
3649 	  }
3650 
3651 	/* Save this register to the stack (but use the old fp value
3652 	   we copied aside if appropriate).  */
3653 	from_regno = (fp_copy_regno >= 0
3654 		      && regno ==
3655 		      HARD_FRAME_POINTER_REGNUM) ? fp_copy_regno : regno;
3656 	FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));
3657 
3658 	offset -= UNITS_PER_WORD;
3659 	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
3660       }
3661 
3662   /* If profiling, force that to happen after the frame is set up.  */
3663   if (crtl->profile)
3664     emit_insn (gen_blockage ());
3665 
3666   /* Load the PIC register if needed.  */
3667   if (flag_pic && crtl->uses_pic_offset_table)
3668     load_pic_register (false);
3669 }
3670 
3671 
3672 /* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
3673    true for a sibcall_epilogue pattern, and false for an epilogue
3674    pattern.  */
3675 void
tilepro_expand_epilogue(bool sibcall_p)3676 tilepro_expand_epilogue (bool sibcall_p)
3677 {
3678   /* We round-robin through four scratch registers to hold temporary
3679      addresses for saving registers, to make instruction scheduling
3680      easier.  */
3681   rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
3682     NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
3683   };
3684   rtx_insn *last_insn, *insn;
3685   unsigned int which_scratch;
3686   int offset, start_offset, regno;
3687   rtx cfa_restores = NULL_RTX;
3688 
3689   /* A register that holds a copy of the incoming fp.  */
3690   int fp_copy_regno = -1;
3691 
3692   /* Next scratch register number to hand out (postdecrementing).  */
3693   int next_scratch_regno = 29;
3694 
3695   int total_size = compute_total_frame_size ();
3696 
3697   last_insn = get_last_insn ();
3698 
3699   /* Load lr first since we are going to need it first.  */
3700   insn = NULL;
3701   if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
3702     {
3703       insn = frame_emit_load (TILEPRO_LINK_REGNUM,
3704 			      compute_frame_addr (0, &next_scratch_regno),
3705 			      &cfa_restores);
3706     }
3707 
3708   if (total_size == 0)
3709     {
3710       if (insn)
3711 	{
3712 	  RTX_FRAME_RELATED_P (insn) = 1;
3713 	  REG_NOTES (insn) = cfa_restores;
3714 	}
3715       goto done;
3716     }
3717 
3718   /* Compute where to start restoring registers.  */
3719   start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
3720   offset = start_offset;
3721 
3722   if (frame_pointer_needed)
3723     fp_copy_regno = next_scratch_regno--;
3724 
3725   /* Restore all callee-saved registers.  */
3726   which_scratch = 0;
3727   for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
3728     if (need_to_save_reg (regno))
3729       {
3730 	rtx r = reg_save_addr[which_scratch];
3731 	if (r == NULL_RTX)
3732 	  {
3733 	    r = compute_frame_addr (offset, &next_scratch_regno);
3734 	    reg_save_addr[which_scratch] = r;
3735 	  }
3736 	else
3737 	  {
3738 	    /* Advance to the next stack slot to store this
3739 	       register.  */
3740 	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
3741 	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
3742 	    emit_insn (gen_rtx_SET (r, p));
3743 	  }
3744 
3745 	if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
3746 	  frame_emit_load (fp_copy_regno, r, NULL);
3747 	else
3748 	  frame_emit_load (regno, r, &cfa_restores);
3749 
3750 	offset -= UNITS_PER_WORD;
3751 	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
3752       }
3753 
3754   if (!tilepro_current_function_is_leaf ())
3755     cfa_restores =
3756       alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);
3757 
3758   emit_insn (gen_blockage ());
3759 
3760   if (frame_pointer_needed)
3761     {
3762       /* Restore the old stack pointer by copying from the frame
3763          pointer.  */
3764       insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
3765 					hard_frame_pointer_rtx));
3766       RTX_FRAME_RELATED_P (insn) = 1;
3767       REG_NOTES (insn) = cfa_restores;
3768       add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
3769     }
3770   else
3771     {
3772       insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
3773 			     cfa_restores);
3774     }
3775 
3776   if (crtl->calls_eh_return)
3777     emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
3778 			      EH_RETURN_STACKADJ_RTX));
3779 
3780   /* Restore the old frame pointer.  */
3781   if (frame_pointer_needed)
3782     {
3783       insn = emit_move_insn (hard_frame_pointer_rtx,
3784 			     gen_rtx_REG (Pmode, fp_copy_regno));
3785       add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
3786     }
3787 
3788   /* Mark the pic registers as live outside of the function.  */
3789   if (flag_pic)
3790     {
3791       emit_use (cfun->machine->text_label_rtx);
3792       emit_use (cfun->machine->got_rtx);
3793     }
3794 
3795 done:
3796   if (!sibcall_p)
3797     {
3798       /* Emit the actual 'return' instruction.  */
3799       emit_jump_insn (gen__return ());
3800     }
3801   else
3802     {
3803       emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
3804     }
3805 
3806   /* Mark all insns we just emitted as frame-related.  */
3807   for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
3808     RTX_FRAME_RELATED_P (last_insn) = 1;
3809 }
3810 
3811 #undef ROUND_ROBIN_SIZE
3812 
3813 
3814 /* Implement INITIAL_ELIMINATION_OFFSET.  */
3815 int
tilepro_initial_elimination_offset(int from,int to)3816 tilepro_initial_elimination_offset (int from, int to)
3817 {
3818   int total_size = compute_total_frame_size ();
3819 
3820   if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
3821     {
3822       return (total_size - crtl->args.pretend_args_size
3823 	      - tilepro_saved_regs_size ());
3824     }
3825   else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
3826     {
3827       return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
3828     }
3829   else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
3830     {
3831       return STACK_POINTER_OFFSET + total_size;
3832     }
3833   else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
3834     {
3835       return STACK_POINTER_OFFSET;
3836     }
3837   else
3838     gcc_unreachable ();
3839 }
3840 
3841 
3842 /* Return an RTX indicating where the return address to the
3843    calling function can be found.  */
3844 rtx
tilepro_return_addr(int count,rtx frame ATTRIBUTE_UNUSED)3845 tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
3846 {
3847   if (count != 0)
3848     return const0_rtx;
3849 
3850   return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
3851 }
3852 
3853 
3854 /* Implement EH_RETURN_HANDLER_RTX.  */
3855 rtx
tilepro_eh_return_handler_rtx(void)3856 tilepro_eh_return_handler_rtx (void)
3857 {
3858   /* The MEM needs to be volatile to prevent it from being
3859      deleted.  */
3860   rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
3861   MEM_VOLATILE_P (tmp) = true;
3862   return tmp;
3863 }
3864 
3865 
3866 
3867 /* Registers  */
3868 
3869 /* Implemnet TARGET_CONDITIONAL_REGISTER_USAGE.  */
3870 static void
tilepro_conditional_register_usage(void)3871 tilepro_conditional_register_usage (void)
3872 {
3873   global_regs[TILEPRO_NETORDER_REGNUM] = 1;
3874   /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
3875      member of fixed_regs, and therefore must be member of
3876      call_used_regs, but it is not a member of call_really_used_regs[]
3877      because it is not clobbered by a call.  */
3878   if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
3879     {
3880       fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
3881       call_used_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
3882     }
3883   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3884     {
3885       fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
3886       call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
3887     }
3888 }
3889 
3890 
3891 /* Implement TARGET_FRAME_POINTER_REQUIRED.  */
3892 static bool
tilepro_frame_pointer_required(void)3893 tilepro_frame_pointer_required (void)
3894 {
3895   return crtl->calls_eh_return || cfun->calls_alloca;
3896 }
3897 
3898 
3899 
3900 /* Scheduling and reorg  */
3901 
3902 /* Return the length of INSN.  LENGTH is the initial length computed
3903    by attributes in the machine-description file.  This is where we
3904    account for bundles.  */
3905 int
tilepro_adjust_insn_length(rtx_insn * insn,int length)3906 tilepro_adjust_insn_length (rtx_insn *insn, int length)
3907 {
3908   machine_mode mode = GET_MODE (insn);
3909 
3910   /* A non-termininating instruction in a bundle has length 0.  */
3911   if (mode == SImode)
3912     return 0;
3913 
3914   /* By default, there is not length adjustment.  */
3915   return length;
3916 }
3917 
3918 
3919 /* Implement TARGET_SCHED_ISSUE_RATE.  */
3920 static int
tilepro_issue_rate(void)3921 tilepro_issue_rate (void)
3922 {
3923   return 3;
3924 }
3925 
3926 
3927 /* Return the rtx for the jump target.  */
3928 static rtx
get_jump_target(rtx branch)3929 get_jump_target (rtx branch)
3930 {
3931   if (CALL_P (branch))
3932     {
3933       rtx call;
3934       call = PATTERN (branch);
3935 
3936       if (GET_CODE (call) == PARALLEL)
3937 	call = XVECEXP (call, 0, 0);
3938 
3939       if (GET_CODE (call) == SET)
3940 	call = SET_SRC (call);
3941 
3942       if (GET_CODE (call) == CALL)
3943 	return XEXP (XEXP (call, 0), 0);
3944     }
3945   return 0;
3946 }
3947 
3948 /* Implement TARGET_SCHED_ADJUST_COST.  */
3949 static int
tilepro_sched_adjust_cost(rtx_insn * insn,rtx link,rtx_insn * dep_insn,int cost)3950 tilepro_sched_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn,
3951 			   int cost)
3952 {
3953   /* If we have a true dependence, INSN is a call, and DEP_INSN
3954      defines a register that is needed by the call (argument or stack
3955      pointer), set its latency to 0 so that it can be bundled with
3956      the call.  Explicitly check for and exclude the case when
3957      DEP_INSN defines the target of the jump.  */
3958   if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
3959     {
3960       rtx target = get_jump_target (insn);
3961       if (!REG_P (target) || !set_of (target, dep_insn))
3962 	return 0;
3963     }
3964 
3965   return cost;
3966 }
3967 
3968 
3969 /* Skip over irrelevant NOTEs and such and look for the next insn we
3970    would consider bundling.  */
3971 static rtx_insn *
next_insn_to_bundle(rtx_insn * r,rtx_insn * end)3972 next_insn_to_bundle (rtx_insn *r, rtx_insn *end)
3973 {
3974   for (; r != end; r = NEXT_INSN (r))
3975     {
3976       if (NONDEBUG_INSN_P (r)
3977 	  && GET_CODE (PATTERN (r)) != USE
3978 	  && GET_CODE (PATTERN (r)) != CLOBBER)
3979 	return r;
3980     }
3981 
3982   return NULL;
3983 }
3984 
3985 
3986 /* Go through all insns, and use the information generated during
3987    scheduling to generate SEQUENCEs to represent bundles of
3988    instructions issued simultaneously.  */
3989 static void
tilepro_gen_bundles(void)3990 tilepro_gen_bundles (void)
3991 {
3992   basic_block bb;
3993   FOR_EACH_BB_FN (bb, cfun)
3994   {
3995     rtx_insn *insn, *next;
3996     rtx_insn *end = NEXT_INSN (BB_END (bb));
3997 
3998     for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
3999       {
4000 	next = next_insn_to_bundle (NEXT_INSN (insn), end);
4001 
4002 	/* Never wrap {} around inline asm.  */
4003 	if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
4004 	  {
4005 	    if (next == NULL_RTX || GET_MODE (next) == TImode
4006 		/* NOTE: The scheduler incorrectly believes a call
4007 		   insn can execute in the same cycle as the insn
4008 		   after the call.  This is of course impossible.
4009 		   Really we need to fix the scheduler somehow, so
4010 		   the code after the call gets scheduled
4011 		   optimally.  */
4012 		|| CALL_P (insn))
4013 	      {
4014 		/* Mark current insn as the end of a bundle.  */
4015 		PUT_MODE (insn, QImode);
4016 	      }
4017 	    else
4018 	      {
4019 		/* Mark it as part of a bundle.  */
4020 		PUT_MODE (insn, SImode);
4021 	      }
4022 	  }
4023       }
4024   }
4025 }
4026 
4027 
4028 /* Helper function for tilepro_fixup_pcrel_references.  */
4029 static void
replace_pc_relative_symbol_ref(rtx_insn * insn,rtx opnds[4],bool first_insn_p)4030 replace_pc_relative_symbol_ref (rtx_insn *insn, rtx opnds[4], bool first_insn_p)
4031 {
4032   rtx_insn *new_insns;
4033 
4034   start_sequence ();
4035 
4036   if (flag_pic == 1)
4037     {
4038       if (!first_insn_p)
4039 	{
4040 	  emit_insn (gen_add_got16 (opnds[0], tilepro_got_rtx (),
4041 				    opnds[2]));
4042 	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
4043 	}
4044     }
4045   else
4046     {
4047       if (first_insn_p)
4048 	{
4049 	  emit_insn (gen_addhi_got32 (opnds[0], tilepro_got_rtx (),
4050 				      opnds[2]));
4051 	}
4052       else
4053 	{
4054 	  emit_insn (gen_addlo_got32 (opnds[0], opnds[1], opnds[2]));
4055 	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
4056 	}
4057     }
4058 
4059   new_insns = get_insns ();
4060   end_sequence ();
4061 
4062   if (new_insns)
4063     emit_insn_before (new_insns, insn);
4064 
4065   delete_insn (insn);
4066 }
4067 
4068 
4069 /* Returns whether INSN is a pc-relative addli insn.   */
4070 static bool
match_addli_pcrel(rtx_insn * insn)4071 match_addli_pcrel (rtx_insn *insn)
4072 {
4073   rtx pattern = PATTERN (insn);
4074   rtx unspec;
4075 
4076   if (GET_CODE (pattern) != SET)
4077     return false;
4078 
4079   if (GET_CODE (SET_SRC (pattern)) != LO_SUM)
4080     return false;
4081 
4082   if (GET_CODE (XEXP (SET_SRC (pattern), 1)) != CONST)
4083     return false;
4084 
4085   unspec = XEXP (XEXP (SET_SRC (pattern), 1), 0);
4086 
4087   return (GET_CODE (unspec) == UNSPEC
4088 	  && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4089 }
4090 
4091 
4092 /* Helper function for tilepro_fixup_pcrel_references.  */
4093 static void
replace_addli_pcrel(rtx_insn * insn)4094 replace_addli_pcrel (rtx_insn *insn)
4095 {
4096   rtx pattern = PATTERN (insn);
4097   rtx set_src;
4098   rtx unspec;
4099   rtx opnds[4];
4100   bool first_insn_p;
4101 
4102   gcc_assert (GET_CODE (pattern) == SET);
4103   opnds[0] = SET_DEST (pattern);
4104 
4105   set_src = SET_SRC (pattern);
4106   gcc_assert (GET_CODE (set_src) == LO_SUM);
4107   gcc_assert (GET_CODE (XEXP (set_src, 1)) == CONST);
4108   opnds[1] = XEXP (set_src, 0);
4109 
4110   unspec = XEXP (XEXP (set_src, 1), 0);
4111   gcc_assert (GET_CODE (unspec) == UNSPEC);
4112   gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4113   opnds[2] = XVECEXP (unspec, 0, 0);
4114   opnds[3] = XVECEXP (unspec, 0, 1);
4115 
4116   /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
4117   if (GET_CODE (opnds[2]) != SYMBOL_REF)
4118     return;
4119 
4120   first_insn_p = (opnds[1] == tilepro_text_label_rtx ());
4121 
4122   replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
4123 }
4124 
4125 
4126 /* Returns whether INSN is a pc-relative auli insn.   */
4127 static bool
match_auli_pcrel(rtx_insn * insn)4128 match_auli_pcrel (rtx_insn *insn)
4129 {
4130   rtx pattern = PATTERN (insn);
4131   rtx high;
4132   rtx unspec;
4133 
4134   if (GET_CODE (pattern) != SET)
4135     return false;
4136 
4137   if (GET_CODE (SET_SRC (pattern)) != PLUS)
4138     return false;
4139 
4140   high = XEXP (SET_SRC (pattern), 1);
4141 
4142   if (GET_CODE (high) != HIGH
4143       || GET_CODE (XEXP (high, 0)) != CONST)
4144     return false;
4145 
4146   unspec = XEXP (XEXP (high, 0), 0);
4147 
4148   return (GET_CODE (unspec) == UNSPEC
4149 	  && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4150 }
4151 
4152 
4153 /* Helper function for tilepro_fixup_pcrel_references.  */
4154 static void
replace_auli_pcrel(rtx_insn * insn)4155 replace_auli_pcrel (rtx_insn *insn)
4156 {
4157   rtx pattern = PATTERN (insn);
4158   rtx set_src;
4159   rtx high;
4160   rtx unspec;
4161   rtx opnds[4];
4162   bool first_insn_p;
4163 
4164   gcc_assert (GET_CODE (pattern) == SET);
4165   opnds[0] = SET_DEST (pattern);
4166 
4167   set_src = SET_SRC (pattern);
4168   gcc_assert (GET_CODE (set_src) == PLUS);
4169   opnds[1] = XEXP (set_src, 0);
4170 
4171   high = XEXP (set_src, 1);
4172   gcc_assert (GET_CODE (high) == HIGH);
4173   gcc_assert (GET_CODE (XEXP (high, 0)) == CONST);
4174 
4175   unspec = XEXP (XEXP (high, 0), 0);
4176   gcc_assert (GET_CODE (unspec) == UNSPEC);
4177   gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
4178   opnds[2] = XVECEXP (unspec, 0, 0);
4179   opnds[3] = XVECEXP (unspec, 0, 1);
4180 
4181   /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
4182   if (GET_CODE (opnds[2]) != SYMBOL_REF)
4183     return;
4184 
4185   first_insn_p = (opnds[1] == tilepro_text_label_rtx ());
4186 
4187   replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
4188 }
4189 
4190 
4191 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4192    going through the GOT when the symbol is local to the compilation
4193    unit.  But such a symbol requires that the common text_label that
4194    we generate at the beginning of the function be in the same section
4195    as the reference to the SYMBOL_REF.  This may not be true if we
4196    generate hot/cold sections.  This function looks for such cases and
4197    replaces such references with the longer sequence going through the
4198    GOT.
4199 
4200    We expect one of the following two instruction sequences:
4201    addli tmp1, txt_label_reg, lo16(sym - txt_label)
4202    auli  tmp2,          tmp1, ha16(sym - txt_label)
4203 
4204    auli  tmp1, txt_label_reg, ha16(sym - txt_label)
4205    addli tmp2,          tmp1, lo16(sym - txt_label)
4206 
4207    If we're compiling -fpic, we replace the first instruction with
4208    nothing, and the second instruction with:
4209 
4210    addli tmp2, got_rtx, got(sym)
4211    lw    tmp2,    tmp2
4212 
4213    If we're compiling -fPIC, we replace the first instruction with:
4214 
4215    auli  tmp1, got_rtx, got_ha16(sym)
4216 
4217    and the second instruction with:
4218 
4219    addli tmp2,    tmp1, got_lo16(sym)
4220    lw    tmp2,    tmp2
4221 
4222    Note that we're careful to disturb the instruction sequence as
4223    little as possible, since it's very late in the compilation
4224    process.
4225 */
4226 static void
tilepro_fixup_pcrel_references(void)4227 tilepro_fixup_pcrel_references (void)
4228 {
4229   rtx_insn *insn, *next_insn;
4230   bool same_section_as_entry = true;
4231 
4232   for (insn = get_insns (); insn; insn = next_insn)
4233     {
4234       next_insn = NEXT_INSN (insn);
4235 
4236       if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
4237 	{
4238 	  same_section_as_entry = !same_section_as_entry;
4239 	  continue;
4240 	}
4241 
4242       if (same_section_as_entry)
4243       	continue;
4244 
4245       if (!(INSN_P (insn)
4246 	    && GET_CODE (PATTERN (insn)) != USE
4247 	    && GET_CODE (PATTERN (insn)) != CLOBBER))
4248 	continue;
4249 
4250       if (match_addli_pcrel (insn))
4251 	replace_addli_pcrel (insn);
4252       else if (match_auli_pcrel (insn))
4253 	replace_auli_pcrel (insn);
4254     }
4255 }
4256 
4257 
4258 /* Ensure that no var tracking notes are emitted in the middle of a
4259    three-instruction bundle.  */
4260 static void
reorder_var_tracking_notes(void)4261 reorder_var_tracking_notes (void)
4262 {
4263   basic_block bb;
4264   FOR_EACH_BB_FN (bb, cfun)
4265   {
4266     rtx_insn *insn, *next;
4267     rtx_insn *queue = NULL;
4268     bool in_bundle = false;
4269 
4270     for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4271       {
4272 	next = NEXT_INSN (insn);
4273 
4274 	if (INSN_P (insn))
4275 	  {
4276 	    /* Emit queued up notes at the last instruction of a bundle.  */
4277 	    if (GET_MODE (insn) == QImode)
4278 	      {
4279 		while (queue)
4280 		  {
4281 		    rtx_insn *next_queue = PREV_INSN (queue);
4282 		    SET_PREV_INSN (NEXT_INSN (insn)) = queue;
4283 		    SET_NEXT_INSN (queue) = NEXT_INSN (insn);
4284 		    SET_NEXT_INSN (insn) = queue;
4285 		    SET_PREV_INSN (queue) = insn;
4286 		    queue = next_queue;
4287 		  }
4288 		in_bundle = false;
4289 	      }
4290 	    else if (GET_MODE (insn) == SImode)
4291 	      in_bundle = true;
4292 	  }
4293 	else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4294 	  {
4295 	    if (in_bundle)
4296 	      {
4297 		rtx_insn *prev = PREV_INSN (insn);
4298 		SET_PREV_INSN (next) = prev;
4299 		SET_NEXT_INSN (prev) = next;
4300 
4301 		SET_PREV_INSN (insn) = queue;
4302 		queue = insn;
4303 	      }
4304 	  }
4305       }
4306   }
4307 }
4308 
4309 
4310 /* Perform machine dependent operations on the rtl chain INSNS.  */
4311 static void
tilepro_reorg(void)4312 tilepro_reorg (void)
4313 {
4314   /* We are freeing block_for_insn in the toplev to keep compatibility
4315      with old MDEP_REORGS that are not CFG based.  Recompute it
4316      now.  */
4317   compute_bb_for_insn ();
4318 
4319   if (flag_reorder_blocks_and_partition)
4320     {
4321       tilepro_fixup_pcrel_references ();
4322     }
4323 
4324   if (flag_schedule_insns_after_reload)
4325     {
4326       split_all_insns ();
4327 
4328       timevar_push (TV_SCHED2);
4329       schedule_insns ();
4330       timevar_pop (TV_SCHED2);
4331 
4332       /* Examine the schedule to group into bundles.  */
4333       tilepro_gen_bundles ();
4334     }
4335 
4336   df_analyze ();
4337 
4338   if (flag_var_tracking)
4339     {
4340       timevar_push (TV_VAR_TRACKING);
4341       variable_tracking_main ();
4342       reorder_var_tracking_notes ();
4343       timevar_pop (TV_VAR_TRACKING);
4344     }
4345 
4346   df_finish_pass (false);
4347 }
4348 
4349 
4350 
4351 /* Assembly  */
4352 
4353 /* Select a format to encode pointers in exception handling data.
4354    CODE is 0 for data, 1 for code labels, 2 for function pointers.
4355    GLOBAL is true if the symbol may be affected by dynamic
4356    relocations.  */
4357 int
tilepro_asm_preferred_eh_data_format(int code ATTRIBUTE_UNUSED,int global)4358 tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
4359 {
4360   return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
4361 }
4362 
4363 
4364 /* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
4365 static void
tilepro_asm_output_mi_thunk(FILE * file,tree thunk_fndecl ATTRIBUTE_UNUSED,HOST_WIDE_INT delta,HOST_WIDE_INT vcall_offset,tree function)4366 tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
4367 			     HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
4368 			     tree function)
4369 {
4370   rtx this_rtx, funexp;
4371   rtx_insn *insn;
4372 
4373   /* Pretend to be a post-reload pass while generating rtl.  */
4374   reload_completed = 1;
4375 
4376   /* Mark the end of the (empty) prologue.  */
4377   emit_note (NOTE_INSN_PROLOGUE_END);
4378 
4379   /* Find the "this" pointer.  If the function returns a structure,
4380      the structure return pointer is in $1.  */
4381   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
4382     this_rtx = gen_rtx_REG (Pmode, 1);
4383   else
4384     this_rtx = gen_rtx_REG (Pmode, 0);
4385 
4386   /* Add DELTA to THIS_RTX.  */
4387   emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));
4388 
4389   /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
4390   if (vcall_offset)
4391     {
4392       rtx tmp;
4393 
4394       tmp = gen_rtx_REG (Pmode, 29);
4395       emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
4396 
4397       emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));
4398 
4399       emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
4400 
4401       emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
4402     }
4403 
4404   /* Generate a tail call to the target function.  */
4405   if (!TREE_USED (function))
4406     {
4407       assemble_external (function);
4408       TREE_USED (function) = 1;
4409     }
4410   funexp = XEXP (DECL_RTL (function), 0);
4411   funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
4412   insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
4413   SIBLING_CALL_P (insn) = 1;
4414 
4415   /* Run just enough of rest_of_compilation to get the insns emitted.
4416      There's not really enough bulk here to make other passes such as
4417      instruction scheduling worth while.  Note that use_thunk calls
4418      assemble_start_function and assemble_end_function.
4419 
4420      We don't currently bundle, but the instruciton sequence is all
4421      serial except for the tail call, so we're only wasting one cycle.
4422    */
4423   insn = get_insns ();
4424   shorten_branches (insn);
4425   final_start_function (insn, file, 1);
4426   final (insn, file, 1);
4427   final_end_function ();
4428 
4429   /* Stop pretending to be a post-reload pass.  */
4430   reload_completed = 0;
4431 }
4432 
4433 
4434 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
4435 static void
tilepro_asm_trampoline_template(FILE * file)4436 tilepro_asm_trampoline_template (FILE *file)
4437 {
4438   fprintf (file, "\tlnk   r10\n");
4439   fprintf (file, "\taddi  r10, r10, 32\n");
4440   fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
4441   fprintf (file, "\tlw    r10, r10\n");
4442   fprintf (file, "\tjr    r11\n");
4443   fprintf (file, "\t.word 0 # <function address>\n");
4444   fprintf (file, "\t.word 0 # <static chain value>\n");
4445 }
4446 
4447 
4448 /* Implement TARGET_TRAMPOLINE_INIT.  */
4449 static void
tilepro_trampoline_init(rtx m_tramp,tree fndecl,rtx static_chain)4450 tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4451 {
4452   rtx fnaddr, chaddr;
4453   rtx mem;
4454   rtx begin_addr, end_addr;
4455   int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
4456 
4457   fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
4458   chaddr = copy_to_reg (static_chain);
4459 
4460   emit_block_move (m_tramp, assemble_trampoline_template (),
4461 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
4462 
4463   mem = adjust_address (m_tramp, ptr_mode,
4464 			TRAMPOLINE_SIZE - 2 * ptr_mode_size);
4465   emit_move_insn (mem, fnaddr);
4466   mem = adjust_address (m_tramp, ptr_mode,
4467 			TRAMPOLINE_SIZE - ptr_mode_size);
4468   emit_move_insn (mem, chaddr);
4469 
4470   /* Get pointers to the beginning and end of the code block.  */
4471   begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
4472   end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
4473 					      TRAMPOLINE_SIZE));
4474 
4475   emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
4476 		     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
4477 		     end_addr, Pmode);
4478 }
4479 
4480 
4481 /* Implement TARGET_PRINT_OPERAND.  */
4482 static void
tilepro_print_operand(FILE * file,rtx x,int code)4483 tilepro_print_operand (FILE *file, rtx x, int code)
4484 {
4485   switch (code)
4486     {
4487     case 'c':
4488       /* Print the compare operator opcode for conditional moves. */
4489       switch (GET_CODE (x))
4490 	{
4491 	case EQ:
4492 	  fputs ("z", file);
4493 	  break;
4494 	case NE:
4495 	  fputs ("nz", file);
4496 	  break;
4497 	default:
4498 	  output_operand_lossage ("invalid %%c operand");
4499 	}
4500       return;
4501 
4502     case 'C':
4503       /* Print the compare operator opcode for conditional moves. */
4504       switch (GET_CODE (x))
4505 	{
4506 	case EQ:
4507 	  fputs ("nz", file);
4508 	  break;
4509 	case NE:
4510 	  fputs ("z", file);
4511 	  break;
4512 	default:
4513 	  output_operand_lossage ("invalid %%C operand");
4514 	}
4515       return;
4516 
4517     case 'h':
4518       {
4519 	/* Print the high 16 bits of a 32-bit constant.  */
4520 	HOST_WIDE_INT i;
4521 	if (CONST_INT_P (x))
4522 	  i = INTVAL (x);
4523 	else if (GET_CODE (x) == CONST_DOUBLE)
4524 	  i = CONST_DOUBLE_LOW (x);
4525 	else
4526 	  {
4527 	    output_operand_lossage ("invalid %%h operand");
4528 	    return;
4529 	  }
4530 	i = trunc_int_for_mode (i >> 16, HImode);
4531 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4532 	return;
4533       }
4534 
4535     case 'H':
4536       {
4537 	rtx addr = NULL;
4538 	const char *opstr = NULL;
4539 	bool pcrel = false;
4540 	if (GET_CODE (x) == CONST
4541 	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
4542 	  {
4543 	    addr = XVECEXP (XEXP (x, 0), 0, 0);
4544 	    switch (XINT (XEXP (x, 0), 1))
4545 	    {
4546 	    case UNSPEC_GOT32_SYM:
4547 	      opstr = "got_ha16";
4548 	      break;
4549 	    case UNSPEC_PCREL_SYM:
4550 	      opstr = "ha16";
4551 	      pcrel = true;
4552 	      break;
4553 	    case UNSPEC_TLS_GD:
4554 	      opstr = "tls_gd_ha16";
4555 	      break;
4556 	    case UNSPEC_TLS_IE:
4557 	      opstr = "tls_ie_ha16";
4558 	      break;
4559 	    case UNSPEC_TLS_LE:
4560 	      opstr = "tls_le_ha16";
4561 	      break;
4562 	    default:
4563 	      output_operand_lossage ("invalid %%H operand");
4564 	    }
4565 	  }
4566 	else
4567 	  {
4568 	    addr = x;
4569 	    opstr = "ha16";
4570 	  }
4571 
4572 	fputs (opstr, file);
4573 	fputc ('(', file);
4574 	output_addr_const (file, addr);
4575 
4576 	if (pcrel)
4577 	  {
4578 	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4579 	    fputs (" - " , file);
4580 	    output_addr_const (file, addr2);
4581 	  }
4582 
4583 	fputc (')', file);
4584 	return;
4585       }
4586 
4587     case 'I':
4588       /* Print an auto-inc memory operand.  */
4589       if (!MEM_P (x))
4590 	{
4591 	  output_operand_lossage ("invalid %%I operand");
4592 	  return;
4593 	}
4594 
4595       output_memory_autoinc_first = true;
4596       output_address (GET_MODE (x), XEXP (x, 0));
4597       return;
4598 
4599     case 'i':
4600       /* Print an auto-inc memory operand.  */
4601       if (!MEM_P (x))
4602 	{
4603 	  output_operand_lossage ("invalid %%i operand");
4604 	  return;
4605 	}
4606 
4607       output_memory_autoinc_first = false;
4608       output_address (GET_MODE (x), XEXP (x, 0));
4609       return;
4610 
4611     case 'j':
4612       {
4613 	/* Print the low 8 bits of a constant.  */
4614 	HOST_WIDE_INT i;
4615 	if (CONST_INT_P (x))
4616 	  i = INTVAL (x);
4617 	else if (GET_CODE (x) == CONST_DOUBLE)
4618 	  i = CONST_DOUBLE_LOW (x);
4619 	else if (GET_CODE (x) == CONST_VECTOR
4620 		 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
4621 	  i = INTVAL (CONST_VECTOR_ELT (x, 0));
4622 	else
4623 	  {
4624 	    output_operand_lossage ("invalid %%j operand");
4625 	    return;
4626 	  }
4627 	i = trunc_int_for_mode (i, QImode);
4628 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4629 	return;
4630       }
4631 
4632     case 'L':
4633       {
4634 	rtx addr = NULL;
4635 	const char *opstr = NULL;
4636 	bool pcrel = false;
4637 	if (GET_CODE (x) == CONST
4638 	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
4639 	  {
4640 	    addr = XVECEXP (XEXP (x, 0), 0, 0);
4641 	    switch (XINT (XEXP (x, 0), 1))
4642 	    {
4643 	    case UNSPEC_GOT16_SYM:
4644 	      opstr = "got";
4645 	      break;
4646 	    case UNSPEC_GOT32_SYM:
4647 	      opstr = "got_lo16";
4648 	      break;
4649 	    case UNSPEC_PCREL_SYM:
4650 	      opstr = "lo16";
4651 	      pcrel = true;
4652 	      break;
4653 	    case UNSPEC_TLS_GD:
4654 	      opstr = "tls_gd_lo16";
4655 	      break;
4656 	    case UNSPEC_TLS_IE:
4657 	      opstr = "tls_ie_lo16";
4658 	      break;
4659 	    case UNSPEC_TLS_LE:
4660 	      opstr = "tls_le_lo16";
4661 	      break;
4662 	    default:
4663 	      output_operand_lossage ("invalid %%L operand");
4664 	    }
4665 	  }
4666 	else
4667 	  {
4668 	    addr = x;
4669 	    opstr = "lo16";
4670 	  }
4671 
4672 	fputs (opstr, file);
4673 	fputc ('(', file);
4674 	output_addr_const (file, addr);
4675 
4676 	if (pcrel)
4677 	  {
4678 	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
4679 	    fputs (" - " , file);
4680 	    output_addr_const (file, addr2);
4681 	  }
4682 
4683 	fputc (')', file);
4684 	return;
4685       }
4686 
4687     case 'p':
4688       if (GET_CODE (x) == SYMBOL_REF)
4689 	{
4690 	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4691 	    fprintf (file, "plt(");
4692 	  output_addr_const (file, x);
4693 	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
4694 	    fprintf (file, ")");
4695 	}
4696       else
4697 	output_addr_const (file, x);
4698       return;
4699 
4700     case 'P':
4701       {
4702 	/* Print a 32-bit constant plus one.  */
4703 	HOST_WIDE_INT i;
4704 	if (!CONST_INT_P (x))
4705 	  {
4706 	    output_operand_lossage ("invalid %%P operand");
4707 	    return;
4708 	  }
4709 	i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
4710 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4711 	return;
4712       }
4713 
4714     case 'M':
4715       {
4716 	/* Print an mm-style bit range.  */
4717 	int first_bit, last_bit;
4718 
4719 	if (!CONST_INT_P (x)
4720 	    || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
4721 					    &last_bit))
4722 	  {
4723 	    output_operand_lossage ("invalid %%M operand");
4724 	    return;
4725 	  }
4726 
4727 	fprintf (file, "%d, %d", first_bit, last_bit);
4728 	return;
4729       }
4730 
4731     case 'N':
4732       {
4733 	const char *reg = NULL;
4734 
4735 	/* Print a network register.  */
4736 	if (!CONST_INT_P (x))
4737 	  {
4738 	    output_operand_lossage ("invalid %%N operand");
4739 	    return;
4740 	  }
4741 
4742 	switch (INTVAL (x))
4743 	  {
4744 	  case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
4745 	  case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
4746 	  case TILEPRO_NETREG_SN:   reg = "sn";   break;
4747 	  case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
4748 	  case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
4749 	  case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
4750 	  case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
4751 	  default: gcc_unreachable ();
4752 	  }
4753 
4754 	fprintf (file, reg);
4755 	return;
4756       }
4757 
4758     case 't':
4759       {
4760 	/* Log base 2 of a power of two.  */
4761 	HOST_WIDE_INT i;
4762 	HOST_WIDE_INT n;
4763 
4764 	if (!CONST_INT_P (x))
4765 	  {
4766 	    output_operand_lossage ("invalid %%t operand");
4767 	    return;
4768 	  }
4769 	n = trunc_int_for_mode (INTVAL (x), SImode);
4770 	i = exact_log2 (n);
4771 	if (i < 0)
4772 	  {
4773 	    output_operand_lossage ("invalid %%t operand '"
4774 				    HOST_WIDE_INT_PRINT_DEC "'", n);
4775 	    return;
4776 	  }
4777 
4778 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
4779 	return;
4780       }
4781       break;
4782 
4783     case 'r':
4784       /* In this case we need a register.  Use 'zero' if the
4785          operand is const0_rtx.  */
4786       if (x == const0_rtx
4787 	  || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
4788 	{
4789 	  fputs ("zero", file);
4790 	  return;
4791 	}
4792       else if (!REG_P (x))
4793 	{
4794 	  output_operand_lossage ("invalid %%r operand");
4795 	  return;
4796 	}
4797       /* FALLTHRU */
4798 
4799     case 0:
4800       if (REG_P (x))
4801 	{
4802 	  fprintf (file, "%s", reg_names[REGNO (x)]);
4803 	  return;
4804 	}
4805       else if (MEM_P (x))
4806 	{
4807 	  output_address (VOIDmode, XEXP (x, 0));
4808 	  return;
4809 	}
4810       else
4811 	{
4812 	  output_addr_const (file, x);
4813 	  return;
4814 	}
4815       break;
4816     }
4817 
4818   debug_rtx (x);
4819   output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
4820 			  code, code);
4821 }
4822 
4823 
4824 /* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
4825 static void
tilepro_print_operand_address(FILE * file,machine_mode mode,rtx addr)4826 tilepro_print_operand_address (FILE *file, machine_mode mode, rtx addr)
4827 {
4828   if (GET_CODE (addr) == POST_DEC
4829       || GET_CODE (addr) == POST_INC)
4830     {
4831       int offset = GET_MODE_SIZE (mode);
4832 
4833       gcc_assert (mode != VOIDmode);
4834 
4835       if (output_memory_autoinc_first)
4836 	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4837       else
4838 	fprintf (file, "%d",
4839 		 GET_CODE (addr) == POST_DEC ? -offset : offset);
4840     }
4841   else if (GET_CODE (addr) == POST_MODIFY)
4842     {
4843       gcc_assert (mode != VOIDmode);
4844 
4845       gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);
4846 
4847       if (output_memory_autoinc_first)
4848 	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
4849       else
4850 	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4851 		 INTVAL (XEXP (XEXP (addr, 1), 1)));
4852     }
4853   else
4854     tilepro_print_operand (file, addr, 'r');
4855 }
4856 
4857 
4858 /* Machine mode of current insn, for determining curly brace
4859    placement.  */
4860 static machine_mode insn_mode;
4861 
4862 
4863 /* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
4864 void
tilepro_final_prescan_insn(rtx_insn * insn)4865 tilepro_final_prescan_insn (rtx_insn *insn)
4866 {
4867   /* Record this for tilepro_asm_output_opcode to examine.  */
4868   insn_mode = GET_MODE (insn);
4869 }
4870 
4871 
4872 /* While emitting asm, are we currently inside '{' for a bundle? */
4873 static bool tilepro_in_bundle = false;
4874 
4875 /* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
4876    appropriate given the bundling information recorded by
4877    tilepro_gen_bundles.  */
4878 const char *
tilepro_asm_output_opcode(FILE * stream,const char * code)4879 tilepro_asm_output_opcode (FILE *stream, const char *code)
4880 {
4881   bool pseudo = !strcmp (code, "pseudo");
4882 
4883   if (!tilepro_in_bundle && insn_mode == SImode)
4884     {
4885       /* Start a new bundle.  */
4886       fprintf (stream, "{\n\t");
4887       tilepro_in_bundle = true;
4888     }
4889 
4890   if (tilepro_in_bundle && insn_mode == QImode)
4891     {
4892       /* Close an existing bundle.  */
4893       static char buf[100];
4894 
4895       gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));
4896 
4897       strcpy (buf, pseudo ? "" : code);
4898       strcat (buf, "\n\t}");
4899       tilepro_in_bundle = false;
4900 
4901       return buf;
4902     }
4903   else
4904     {
4905       return pseudo ? "" : code;
4906     }
4907 }
4908 
4909 
4910 /* Output assembler code to FILE to increment profiler label # LABELNO
4911    for profiling a function entry.  */
4912 void
tilepro_function_profiler(FILE * file,int labelno ATTRIBUTE_UNUSED)4913 tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
4914 {
4915   if (tilepro_in_bundle)
4916     {
4917       fprintf (file, "\t}\n");
4918     }
4919 
4920   if (flag_pic)
4921     {
4922       fprintf (file,
4923 	       "\t{\n"
4924 	       "\tmove\tr10, lr\n"
4925 	       "\tjal\tplt(%s)\n"
4926 	       "\t}\n", MCOUNT_NAME);
4927     }
4928   else
4929     {
4930       fprintf (file,
4931 	       "\t{\n"
4932 	       "\tmove\tr10, lr\n"
4933 	       "\tjal\t%s\n"
4934 	       "\t}\n", MCOUNT_NAME);
4935     }
4936 
4937   tilepro_in_bundle = false;
4938 }
4939 
4940 
4941 /* Implement TARGET_ASM_FILE_END.  */
4942 static void
tilepro_file_end(void)4943 tilepro_file_end (void)
4944 {
4945   if (NEED_INDICATE_EXEC_STACK)
4946     file_end_indicate_exec_stack ();
4947 }
4948 
4949 
4950 #undef  TARGET_HAVE_TLS
4951 #define TARGET_HAVE_TLS HAVE_AS_TLS
4952 
4953 #undef  TARGET_OPTION_OVERRIDE
4954 #define TARGET_OPTION_OVERRIDE tilepro_option_override
4955 
4956 #undef  TARGET_SCALAR_MODE_SUPPORTED_P
4957 #define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p
4958 
4959 #undef  TARGET_VECTOR_MODE_SUPPORTED_P
4960 #define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p
4961 
4962 #undef  TARGET_CANNOT_FORCE_CONST_MEM
4963 #define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem
4964 
4965 #undef  TARGET_FUNCTION_OK_FOR_SIBCALL
4966 #define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall
4967 
4968 #undef  TARGET_PASS_BY_REFERENCE
4969 #define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference
4970 
4971 #undef  TARGET_RETURN_IN_MEMORY
4972 #define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory
4973 
4974 #undef  TARGET_FUNCTION_ARG_BOUNDARY
4975 #define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary
4976 
4977 #undef  TARGET_FUNCTION_ARG
4978 #define TARGET_FUNCTION_ARG tilepro_function_arg
4979 
4980 #undef  TARGET_FUNCTION_ARG_ADVANCE
4981 #define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance
4982 
4983 #undef  TARGET_FUNCTION_VALUE
4984 #define TARGET_FUNCTION_VALUE tilepro_function_value
4985 
4986 #undef  TARGET_LIBCALL_VALUE
4987 #define TARGET_LIBCALL_VALUE tilepro_libcall_value
4988 
4989 #undef  TARGET_FUNCTION_VALUE_REGNO_P
4990 #define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p
4991 
4992 #undef  TARGET_PROMOTE_FUNCTION_MODE
4993 #define TARGET_PROMOTE_FUNCTION_MODE \
4994   default_promote_function_mode_always_promote
4995 
4996 #undef  TARGET_PROMOTE_PROTOTYPES
4997 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
4998 
4999 #undef  TARGET_BUILD_BUILTIN_VA_LIST
5000 #define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list
5001 
5002 #undef  TARGET_EXPAND_BUILTIN_VA_START
5003 #define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start
5004 
5005 #undef  TARGET_SETUP_INCOMING_VARARGS
5006 #define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs
5007 
5008 #undef  TARGET_GIMPLIFY_VA_ARG_EXPR
5009 #define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr
5010 
5011 #undef  TARGET_RTX_COSTS
5012 #define TARGET_RTX_COSTS tilepro_rtx_costs
5013 
5014 /* Limit to what we can reach in one addli.  */
5015 #undef  TARGET_MIN_ANCHOR_OFFSET
5016 #define TARGET_MIN_ANCHOR_OFFSET -32768
5017 #undef  TARGET_MAX_ANCHOR_OFFSET
5018 #define TARGET_MAX_ANCHOR_OFFSET 32767
5019 
5020 #undef  TARGET_LEGITIMATE_CONSTANT_P
5021 #define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p
5022 
5023 #undef  TARGET_LEGITIMATE_ADDRESS_P
5024 #define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p
5025 
5026 #undef  TARGET_LEGITIMIZE_ADDRESS
5027 #define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address
5028 
5029 #undef  TARGET_DELEGITIMIZE_ADDRESS
5030 #define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address
5031 
5032 #undef  TARGET_INIT_BUILTINS
5033 #define TARGET_INIT_BUILTINS  tilepro_init_builtins
5034 
5035 #undef  TARGET_BUILTIN_DECL
5036 #define TARGET_BUILTIN_DECL tilepro_builtin_decl
5037 
5038 #undef  TARGET_EXPAND_BUILTIN
5039 #define TARGET_EXPAND_BUILTIN tilepro_expand_builtin
5040 
5041 #undef  TARGET_CONDITIONAL_REGISTER_USAGE
5042 #define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage
5043 
5044 #undef  TARGET_FRAME_POINTER_REQUIRED
5045 #define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required
5046 
5047 #undef  TARGET_DELAY_SCHED2
5048 #define TARGET_DELAY_SCHED2 true
5049 
5050 #undef  TARGET_DELAY_VARTRACK
5051 #define TARGET_DELAY_VARTRACK true
5052 
5053 #undef  TARGET_SCHED_ISSUE_RATE
5054 #define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate
5055 
5056 #undef  TARGET_SCHED_ADJUST_COST
5057 #define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost
5058 
5059 #undef  TARGET_MACHINE_DEPENDENT_REORG
5060 #define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg
5061 
5062 #undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
5063 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5064   hook_bool_const_tree_hwi_hwi_const_tree_true
5065 
5066 #undef  TARGET_ASM_OUTPUT_MI_THUNK
5067 #define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk
5068 
5069 #undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
5070 #define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template
5071 
5072 #undef  TARGET_TRAMPOLINE_INIT
5073 #define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init
5074 
5075 #undef  TARGET_PRINT_OPERAND
5076 #define TARGET_PRINT_OPERAND tilepro_print_operand
5077 
5078 #undef  TARGET_PRINT_OPERAND_ADDRESS
5079 #define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address
5080 
5081 #undef  TARGET_ASM_FILE_END
5082 #define TARGET_ASM_FILE_END tilepro_file_end
5083 
5084 #undef  TARGET_CAN_USE_DOLOOP_P
5085 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
5086 
5087 struct gcc_target targetm = TARGET_INITIALIZER;
5088 
5089 #include "gt-tilepro.h"
5090