1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "optabs.h"
33 #include "regs.h" /* For reg_renumber.  */
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "cgraph.h"
37 #include "diagnostic.h"
38 #include "fold-const.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "stmt.h"
42 #include "print-tree.h"
43 #include "cfgrtl.h"
44 #include "cfganal.h"
45 #include "cfgbuild.h"
46 #include "cfgcleanup.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "expr.h"
51 #include "internal-fn.h"
52 #include "tree-eh.h"
53 #include "gimple-iterator.h"
54 #include "gimple-expr.h"
55 #include "gimple-walk.h"
56 #include "tree-cfg.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "except.h"
60 #include "gimple-pretty-print.h"
61 #include "toplev.h"
62 #include "debug.h"
63 #include "params.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
70 #include "asan.h"
71 #include "tree-ssa-address.h"
72 #include "output.h"
73 #include "builtins.h"
74 #include "tree-chkp.h"
75 #include "rtl-chkp.h"
76 
77 /* Some systems use __main in a way incompatible with its use in gcc, in these
78    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79    give the same symbol without quotes for an alternative entry point.  You
80    must define both, or neither.  */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84 
85 /* This variable holds information helping the rewriting of SSA trees
86    into RTL.  */
87 struct ssaexpand SA;
88 
89 /* This variable holds the currently expanded gimple statement for purposes
90    of comminucating the profile info to the builtin expanders.  */
91 gimple *currently_expanding_gimple_stmt;
92 
93 static rtx expand_debug_expr (tree);
94 
95 static bool defer_stack_allocation (tree, bool);
96 
97 static void record_alignment_for_reg_var (unsigned int);
98 
99 /* Return an expression tree corresponding to the RHS of GIMPLE
100    statement STMT.  */
101 
102 tree
gimple_assign_rhs_to_tree(gimple * stmt)103 gimple_assign_rhs_to_tree (gimple *stmt)
104 {
105   tree t;
106   enum gimple_rhs_class grhs_class;
107 
108   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
109 
110   if (grhs_class == GIMPLE_TERNARY_RHS)
111     t = build3 (gimple_assign_rhs_code (stmt),
112 		TREE_TYPE (gimple_assign_lhs (stmt)),
113 		gimple_assign_rhs1 (stmt),
114 		gimple_assign_rhs2 (stmt),
115 		gimple_assign_rhs3 (stmt));
116   else if (grhs_class == GIMPLE_BINARY_RHS)
117     t = build2 (gimple_assign_rhs_code (stmt),
118 		TREE_TYPE (gimple_assign_lhs (stmt)),
119 		gimple_assign_rhs1 (stmt),
120 		gimple_assign_rhs2 (stmt));
121   else if (grhs_class == GIMPLE_UNARY_RHS)
122     t = build1 (gimple_assign_rhs_code (stmt),
123 		TREE_TYPE (gimple_assign_lhs (stmt)),
124 		gimple_assign_rhs1 (stmt));
125   else if (grhs_class == GIMPLE_SINGLE_RHS)
126     {
127       t = gimple_assign_rhs1 (stmt);
128       /* Avoid modifying this tree in place below.  */
129       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
130 	   && gimple_location (stmt) != EXPR_LOCATION (t))
131 	  || (gimple_block (stmt)
132 	      && currently_expanding_to_rtl
133 	      && EXPR_P (t)))
134 	t = copy_node (t);
135     }
136   else
137     gcc_unreachable ();
138 
139   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140     SET_EXPR_LOCATION (t, gimple_location (stmt));
141 
142   return t;
143 }
144 
145 
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
148 #endif
149 
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
151 
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153    Prefer ignored decls, to simplify debug dumps and reduce ambiguity
154    out of the same user variable being in multiple partitions (this is
155    less likely for compiler-introduced temps).  */
156 
157 static tree
leader_merge(tree cur,tree next)158 leader_merge (tree cur, tree next)
159 {
160   if (cur == NULL || cur == next)
161     return next;
162 
163   if (DECL_P (cur) && DECL_IGNORED_P (cur))
164     return cur;
165 
166   if (DECL_P (next) && DECL_IGNORED_P (next))
167     return next;
168 
169   return cur;
170 }
171 
172 /* Associate declaration T with storage space X.  If T is no
173    SSA name this is exactly SET_DECL_RTL, otherwise make the
174    partition of T associated with X.  */
175 static inline void
set_rtl(tree t,rtx x)176 set_rtl (tree t, rtx x)
177 {
178   gcc_checking_assert (!x
179 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 		       || (use_register_for_decl (t)
181 			   ? (REG_P (x)
182 			      || (GET_CODE (x) == CONCAT
183 				  && (REG_P (XEXP (x, 0))
184 				      || SUBREG_P (XEXP (x, 0)))
185 				  && (REG_P (XEXP (x, 1))
186 				      || SUBREG_P (XEXP (x, 1))))
187 			      /* We need to accept PARALLELs for RESUT_DECLs
188 				 because of vector types with BLKmode returned
189 				 in multiple registers, but they are supposed
190 				 to be uncoalesced.  */
191 			      || (GET_CODE (x) == PARALLEL
192 				  && SSAVAR (t)
193 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
194 				  && (GET_MODE (x) == BLKmode
195 				      || !flag_tree_coalesce_vars)))
196 			   : (MEM_P (x) || x == pc_rtx
197 			      || (GET_CODE (x) == CONCAT
198 				  && MEM_P (XEXP (x, 0))
199 				  && MEM_P (XEXP (x, 1))))));
200   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201      RESULT_DECLs has the expected mode.  For memory, we accept
202      unpromoted modes, since that's what we're likely to get.  For
203      PARM_DECLs and RESULT_DECLs, we'll have been called by
204      set_parm_rtl, which will give us the default def, so we don't
205      have to compute it ourselves.  For RESULT_DECLs, we accept mode
206      mismatches too, as long as we have BLKmode or are not coalescing
207      across variables, so that we don't reject BLKmode PARALLELs or
208      unpromoted REGs.  */
209   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
210 		       || (SSAVAR (t)
211 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 			   && (promote_ssa_mode (t, NULL) == BLKmode
213 			       || !flag_tree_coalesce_vars))
214 		       || !use_register_for_decl (t)
215 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
216 
217   if (x)
218     {
219       bool skip = false;
220       tree cur = NULL_TREE;
221       rtx xm = x;
222 
223     retry:
224       if (MEM_P (xm))
225 	cur = MEM_EXPR (xm);
226       else if (REG_P (xm))
227 	cur = REG_EXPR (xm);
228       else if (SUBREG_P (xm))
229 	{
230 	  gcc_assert (subreg_lowpart_p (xm));
231 	  xm = SUBREG_REG (xm);
232 	  goto retry;
233 	}
234       else if (GET_CODE (xm) == CONCAT)
235 	{
236 	  xm = XEXP (xm, 0);
237 	  goto retry;
238 	}
239       else if (GET_CODE (xm) == PARALLEL)
240 	{
241 	  xm = XVECEXP (xm, 0, 0);
242 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 	  xm = XEXP (xm, 0);
244 	  goto retry;
245 	}
246       else if (xm == pc_rtx)
247 	skip = true;
248       else
249 	gcc_unreachable ();
250 
251       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
252 
253       if (cur != next)
254 	{
255 	  if (MEM_P (x))
256 	    set_mem_attributes (x,
257 				next && TREE_CODE (next) == SSA_NAME
258 				? TREE_TYPE (next)
259 				: next, true);
260 	  else
261 	    set_reg_attrs_for_decl_rtl (next, x);
262 	}
263     }
264 
265   if (TREE_CODE (t) == SSA_NAME)
266     {
267       int part = var_to_partition (SA.map, t);
268       if (part != NO_PARTITION)
269 	{
270 	  if (SA.partition_to_pseudo[part])
271 	    gcc_assert (SA.partition_to_pseudo[part] == x);
272 	  else if (x != pc_rtx)
273 	    SA.partition_to_pseudo[part] = x;
274 	}
275       /* For the benefit of debug information at -O0 (where
276          vartracking doesn't run) record the place also in the base
277          DECL.  For PARMs and RESULTs, do so only when setting the
278          default def.  */
279       if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 	  && (VAR_P (SSA_NAME_VAR (t))
281 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
282 	{
283 	  tree var = SSA_NAME_VAR (t);
284 	  /* If we don't yet have something recorded, just record it now.  */
285 	  if (!DECL_RTL_SET_P (var))
286 	    SET_DECL_RTL (var, x);
287 	  /* If we have it set already to "multiple places" don't
288 	     change this.  */
289 	  else if (DECL_RTL (var) == pc_rtx)
290 	    ;
291 	  /* If we have something recorded and it's not the same place
292 	     as we want to record now, we have multiple partitions for the
293 	     same base variable, with different places.  We can't just
294 	     randomly chose one, hence we have to say that we don't know.
295 	     This only happens with optimization, and there var-tracking
296 	     will figure out the right thing.  */
297 	  else if (DECL_RTL (var) != x)
298 	    SET_DECL_RTL (var, pc_rtx);
299 	}
300     }
301   else
302     SET_DECL_RTL (t, x);
303 }
304 
305 /* This structure holds data relevant to one variable that will be
306    placed in a stack slot.  */
307 struct stack_var
308 {
309   /* The Variable.  */
310   tree decl;
311 
312   /* Initially, the size of the variable.  Later, the size of the partition,
313      if this variable becomes it's partition's representative.  */
314   HOST_WIDE_INT size;
315 
316   /* The *byte* alignment required for this variable.  Or as, with the
317      size, the alignment for this partition.  */
318   unsigned int alignb;
319 
320   /* The partition representative.  */
321   size_t representative;
322 
323   /* The next stack variable in the partition, or EOC.  */
324   size_t next;
325 
326   /* The numbers of conflicting stack variables.  */
327   bitmap conflicts;
328 };
329 
330 #define EOC  ((size_t)-1)
331 
332 /* We have an array of such objects while deciding allocation.  */
333 static struct stack_var *stack_vars;
334 static size_t stack_vars_alloc;
335 static size_t stack_vars_num;
336 static hash_map<tree, size_t> *decl_to_stack_part;
337 
338 /* Conflict bitmaps go on this obstack.  This allows us to destroy
339    all of them in one big sweep.  */
340 static bitmap_obstack stack_var_bitmap_obstack;
341 
342 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
343    is non-decreasing.  */
344 static size_t *stack_vars_sorted;
345 
346 /* The phase of the stack frame.  This is the known misalignment of
347    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
348    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
349 static int frame_phase;
350 
351 /* Used during expand_used_vars to remember if we saw any decls for
352    which we'd like to enable stack smashing protection.  */
353 static bool has_protected_decls;
354 
355 /* Used during expand_used_vars.  Remember if we say a character buffer
356    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
357 static bool has_short_buffer;
358 
359 /* Compute the byte alignment to use for DECL.  Ignore alignment
360    we can't do with expected alignment of the stack boundary.  */
361 
362 static unsigned int
align_local_variable(tree decl)363 align_local_variable (tree decl)
364 {
365   unsigned int align;
366 
367   if (TREE_CODE (decl) == SSA_NAME)
368     align = TYPE_ALIGN (TREE_TYPE (decl));
369   else
370     {
371       align = LOCAL_DECL_ALIGNMENT (decl);
372       DECL_ALIGN (decl) = align;
373     }
374   return align / BITS_PER_UNIT;
375 }
376 
377 /* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
378    down otherwise.  Return truncated BASE value.  */
379 
380 static inline unsigned HOST_WIDE_INT
align_base(HOST_WIDE_INT base,unsigned HOST_WIDE_INT align,bool align_up)381 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
382 {
383   return align_up ? (base + align - 1) & -align : base & -align;
384 }
385 
386 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
387    Return the frame offset.  */
388 
389 static HOST_WIDE_INT
alloc_stack_frame_space(HOST_WIDE_INT size,unsigned HOST_WIDE_INT align)390 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
391 {
392   HOST_WIDE_INT offset, new_frame_offset;
393 
394   if (FRAME_GROWS_DOWNWARD)
395     {
396       new_frame_offset
397 	= align_base (frame_offset - frame_phase - size,
398 		      align, false) + frame_phase;
399       offset = new_frame_offset;
400     }
401   else
402     {
403       new_frame_offset
404 	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
405       offset = new_frame_offset;
406       new_frame_offset += size;
407     }
408   frame_offset = new_frame_offset;
409 
410   if (frame_offset_overflow (frame_offset, cfun->decl))
411     frame_offset = offset = 0;
412 
413   return offset;
414 }
415 
416 /* Accumulate DECL into STACK_VARS.  */
417 
418 static void
add_stack_var(tree decl)419 add_stack_var (tree decl)
420 {
421   struct stack_var *v;
422 
423   if (stack_vars_num >= stack_vars_alloc)
424     {
425       if (stack_vars_alloc)
426 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
427       else
428 	stack_vars_alloc = 32;
429       stack_vars
430 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
431     }
432   if (!decl_to_stack_part)
433     decl_to_stack_part = new hash_map<tree, size_t>;
434 
435   v = &stack_vars[stack_vars_num];
436   decl_to_stack_part->put (decl, stack_vars_num);
437 
438   v->decl = decl;
439   tree size = TREE_CODE (decl) == SSA_NAME
440     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
441     : DECL_SIZE_UNIT (decl);
442   v->size = tree_to_uhwi (size);
443   /* Ensure that all variables have size, so that &a != &b for any two
444      variables that are simultaneously live.  */
445   if (v->size == 0)
446     v->size = 1;
447   v->alignb = align_local_variable (decl);
448   /* An alignment of zero can mightily confuse us later.  */
449   gcc_assert (v->alignb != 0);
450 
451   /* All variables are initially in their own partition.  */
452   v->representative = stack_vars_num;
453   v->next = EOC;
454 
455   /* All variables initially conflict with no other.  */
456   v->conflicts = NULL;
457 
458   /* Ensure that this decl doesn't get put onto the list twice.  */
459   set_rtl (decl, pc_rtx);
460 
461   stack_vars_num++;
462 }
463 
464 /* Make the decls associated with luid's X and Y conflict.  */
465 
466 static void
add_stack_var_conflict(size_t x,size_t y)467 add_stack_var_conflict (size_t x, size_t y)
468 {
469   struct stack_var *a = &stack_vars[x];
470   struct stack_var *b = &stack_vars[y];
471   if (!a->conflicts)
472     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
473   if (!b->conflicts)
474     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
475   bitmap_set_bit (a->conflicts, y);
476   bitmap_set_bit (b->conflicts, x);
477 }
478 
479 /* Check whether the decls associated with luid's X and Y conflict.  */
480 
481 static bool
stack_var_conflict_p(size_t x,size_t y)482 stack_var_conflict_p (size_t x, size_t y)
483 {
484   struct stack_var *a = &stack_vars[x];
485   struct stack_var *b = &stack_vars[y];
486   if (x == y)
487     return false;
488   /* Partitions containing an SSA name result from gimple registers
489      with things like unsupported modes.  They are top-level and
490      hence conflict with everything else.  */
491   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
492     return true;
493 
494   if (!a->conflicts || !b->conflicts)
495     return false;
496   return bitmap_bit_p (a->conflicts, y);
497 }
498 
499 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
500    enter its partition number into bitmap DATA.  */
501 
502 static bool
visit_op(gimple *,tree op,tree,void * data)503 visit_op (gimple *, tree op, tree, void *data)
504 {
505   bitmap active = (bitmap)data;
506   op = get_base_address (op);
507   if (op
508       && DECL_P (op)
509       && DECL_RTL_IF_SET (op) == pc_rtx)
510     {
511       size_t *v = decl_to_stack_part->get (op);
512       if (v)
513 	bitmap_set_bit (active, *v);
514     }
515   return false;
516 }
517 
518 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
519    record conflicts between it and all currently active other partitions
520    from bitmap DATA.  */
521 
522 static bool
visit_conflict(gimple *,tree op,tree,void * data)523 visit_conflict (gimple *, tree op, tree, void *data)
524 {
525   bitmap active = (bitmap)data;
526   op = get_base_address (op);
527   if (op
528       && DECL_P (op)
529       && DECL_RTL_IF_SET (op) == pc_rtx)
530     {
531       size_t *v = decl_to_stack_part->get (op);
532       if (v && bitmap_set_bit (active, *v))
533 	{
534 	  size_t num = *v;
535 	  bitmap_iterator bi;
536 	  unsigned i;
537 	  gcc_assert (num < stack_vars_num);
538 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
539 	    add_stack_var_conflict (num, i);
540 	}
541     }
542   return false;
543 }
544 
545 /* Helper routine for add_scope_conflicts, calculating the active partitions
546    at the end of BB, leaving the result in WORK.  We're called to generate
547    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
548    liveness.  */
549 
550 static void
add_scope_conflicts_1(basic_block bb,bitmap work,bool for_conflict)551 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
552 {
553   edge e;
554   edge_iterator ei;
555   gimple_stmt_iterator gsi;
556   walk_stmt_load_store_addr_fn visit;
557 
558   bitmap_clear (work);
559   FOR_EACH_EDGE (e, ei, bb->preds)
560     bitmap_ior_into (work, (bitmap)e->src->aux);
561 
562   visit = visit_op;
563 
564   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
565     {
566       gimple *stmt = gsi_stmt (gsi);
567       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
568     }
569   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
570     {
571       gimple *stmt = gsi_stmt (gsi);
572 
573       if (gimple_clobber_p (stmt))
574 	{
575 	  tree lhs = gimple_assign_lhs (stmt);
576 	  size_t *v;
577 	  /* Nested function lowering might introduce LHSs
578 	     that are COMPONENT_REFs.  */
579 	  if (TREE_CODE (lhs) != VAR_DECL)
580 	    continue;
581 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
582 	      && (v = decl_to_stack_part->get (lhs)))
583 	    bitmap_clear_bit (work, *v);
584 	}
585       else if (!is_gimple_debug (stmt))
586 	{
587 	  if (for_conflict
588 	      && visit == visit_op)
589 	    {
590 	      /* If this is the first real instruction in this BB we need
591 	         to add conflicts for everything live at this point now.
592 		 Unlike classical liveness for named objects we can't
593 		 rely on seeing a def/use of the names we're interested in.
594 		 There might merely be indirect loads/stores.  We'd not add any
595 		 conflicts for such partitions.  */
596 	      bitmap_iterator bi;
597 	      unsigned i;
598 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
599 		{
600 		  struct stack_var *a = &stack_vars[i];
601 		  if (!a->conflicts)
602 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
603 		  bitmap_ior_into (a->conflicts, work);
604 		}
605 	      visit = visit_conflict;
606 	    }
607 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
608 	}
609     }
610 }
611 
612 /* Generate stack partition conflicts between all partitions that are
613    simultaneously live.  */
614 
615 static void
add_scope_conflicts(void)616 add_scope_conflicts (void)
617 {
618   basic_block bb;
619   bool changed;
620   bitmap work = BITMAP_ALLOC (NULL);
621   int *rpo;
622   int n_bbs;
623 
624   /* We approximate the live range of a stack variable by taking the first
625      mention of its name as starting point(s), and by the end-of-scope
626      death clobber added by gimplify as ending point(s) of the range.
627      This overapproximates in the case we for instance moved an address-taken
628      operation upward, without also moving a dereference to it upwards.
629      But it's conservatively correct as a variable never can hold values
630      before its name is mentioned at least once.
631 
632      We then do a mostly classical bitmap liveness algorithm.  */
633 
634   FOR_ALL_BB_FN (bb, cfun)
635     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
636 
637   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
638   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
639 
640   changed = true;
641   while (changed)
642     {
643       int i;
644       changed = false;
645       for (i = 0; i < n_bbs; i++)
646 	{
647 	  bitmap active;
648 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
649 	  active = (bitmap)bb->aux;
650 	  add_scope_conflicts_1 (bb, work, false);
651 	  if (bitmap_ior_into (active, work))
652 	    changed = true;
653 	}
654     }
655 
656   FOR_EACH_BB_FN (bb, cfun)
657     add_scope_conflicts_1 (bb, work, true);
658 
659   free (rpo);
660   BITMAP_FREE (work);
661   FOR_ALL_BB_FN (bb, cfun)
662     BITMAP_FREE (bb->aux);
663 }
664 
665 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
666    sorting an array of indices by the properties of the object.  */
667 
668 static int
stack_var_cmp(const void * a,const void * b)669 stack_var_cmp (const void *a, const void *b)
670 {
671   size_t ia = *(const size_t *)a;
672   size_t ib = *(const size_t *)b;
673   unsigned int aligna = stack_vars[ia].alignb;
674   unsigned int alignb = stack_vars[ib].alignb;
675   HOST_WIDE_INT sizea = stack_vars[ia].size;
676   HOST_WIDE_INT sizeb = stack_vars[ib].size;
677   tree decla = stack_vars[ia].decl;
678   tree declb = stack_vars[ib].decl;
679   bool largea, largeb;
680   unsigned int uida, uidb;
681 
682   /* Primary compare on "large" alignment.  Large comes first.  */
683   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
684   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
685   if (largea != largeb)
686     return (int)largeb - (int)largea;
687 
688   /* Secondary compare on size, decreasing  */
689   if (sizea > sizeb)
690     return -1;
691   if (sizea < sizeb)
692     return 1;
693 
694   /* Tertiary compare on true alignment, decreasing.  */
695   if (aligna < alignb)
696     return -1;
697   if (aligna > alignb)
698     return 1;
699 
700   /* Final compare on ID for sort stability, increasing.
701      Two SSA names are compared by their version, SSA names come before
702      non-SSA names, and two normal decls are compared by their DECL_UID.  */
703   if (TREE_CODE (decla) == SSA_NAME)
704     {
705       if (TREE_CODE (declb) == SSA_NAME)
706 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
707       else
708 	return -1;
709     }
710   else if (TREE_CODE (declb) == SSA_NAME)
711     return 1;
712   else
713     uida = DECL_UID (decla), uidb = DECL_UID (declb);
714   if (uida < uidb)
715     return 1;
716   if (uida > uidb)
717     return -1;
718   return 0;
719 }
720 
721 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
722 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
723 
724 /* If the points-to solution *PI points to variables that are in a partition
725    together with other variables add all partition members to the pointed-to
726    variables bitmap.  */
727 
728 static void
add_partitioned_vars_to_ptset(struct pt_solution * pt,part_hashmap * decls_to_partitions,hash_set<bitmap> * visited,bitmap temp)729 add_partitioned_vars_to_ptset (struct pt_solution *pt,
730 			       part_hashmap *decls_to_partitions,
731 			       hash_set<bitmap> *visited, bitmap temp)
732 {
733   bitmap_iterator bi;
734   unsigned i;
735   bitmap *part;
736 
737   if (pt->anything
738       || pt->vars == NULL
739       /* The pointed-to vars bitmap is shared, it is enough to
740 	 visit it once.  */
741       || visited->add (pt->vars))
742     return;
743 
744   bitmap_clear (temp);
745 
746   /* By using a temporary bitmap to store all members of the partitions
747      we have to add we make sure to visit each of the partitions only
748      once.  */
749   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
750     if ((!temp
751 	 || !bitmap_bit_p (temp, i))
752 	&& (part = decls_to_partitions->get (i)))
753       bitmap_ior_into (temp, *part);
754   if (!bitmap_empty_p (temp))
755     bitmap_ior_into (pt->vars, temp);
756 }
757 
758 /* Update points-to sets based on partition info, so we can use them on RTL.
759    The bitmaps representing stack partitions will be saved until expand,
760    where partitioned decls used as bases in memory expressions will be
761    rewritten.  */
762 
763 static void
update_alias_info_with_stack_vars(void)764 update_alias_info_with_stack_vars (void)
765 {
766   part_hashmap *decls_to_partitions = NULL;
767   size_t i, j;
768   tree var = NULL_TREE;
769 
770   for (i = 0; i < stack_vars_num; i++)
771     {
772       bitmap part = NULL;
773       tree name;
774       struct ptr_info_def *pi;
775 
776       /* Not interested in partitions with single variable.  */
777       if (stack_vars[i].representative != i
778           || stack_vars[i].next == EOC)
779         continue;
780 
781       if (!decls_to_partitions)
782 	{
783 	  decls_to_partitions = new part_hashmap;
784 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
785 	}
786 
787       /* Create an SSA_NAME that points to the partition for use
788          as base during alias-oracle queries on RTL for bases that
789 	 have been partitioned.  */
790       if (var == NULL_TREE)
791 	var = create_tmp_var (ptr_type_node);
792       name = make_ssa_name (var);
793 
794       /* Create bitmaps representing partitions.  They will be used for
795          points-to sets later, so use GGC alloc.  */
796       part = BITMAP_GGC_ALLOC ();
797       for (j = i; j != EOC; j = stack_vars[j].next)
798 	{
799 	  tree decl = stack_vars[j].decl;
800 	  unsigned int uid = DECL_PT_UID (decl);
801 	  bitmap_set_bit (part, uid);
802 	  decls_to_partitions->put (uid, part);
803 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
804 	  if (TREE_ADDRESSABLE (decl))
805 	    TREE_ADDRESSABLE (name) = 1;
806 	}
807 
808       /* Make the SSA name point to all partition members.  */
809       pi = get_ptr_info (name);
810       pt_solution_set (&pi->pt, part, false);
811     }
812 
813   /* Make all points-to sets that contain one member of a partition
814      contain all members of the partition.  */
815   if (decls_to_partitions)
816     {
817       unsigned i;
818       hash_set<bitmap> visited;
819       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
820 
821       for (i = 1; i < num_ssa_names; i++)
822 	{
823 	  tree name = ssa_name (i);
824 	  struct ptr_info_def *pi;
825 
826 	  if (name
827 	      && POINTER_TYPE_P (TREE_TYPE (name))
828 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
829 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
830 					   &visited, temp);
831 	}
832 
833       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
834 				     decls_to_partitions, &visited, temp);
835 
836       delete decls_to_partitions;
837       BITMAP_FREE (temp);
838     }
839 }
840 
841 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
842    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
843    Merge them into a single partition A.  */
844 
845 static void
union_stack_vars(size_t a,size_t b)846 union_stack_vars (size_t a, size_t b)
847 {
848   struct stack_var *vb = &stack_vars[b];
849   bitmap_iterator bi;
850   unsigned u;
851 
852   gcc_assert (stack_vars[b].next == EOC);
853    /* Add B to A's partition.  */
854   stack_vars[b].next = stack_vars[a].next;
855   stack_vars[b].representative = a;
856   stack_vars[a].next = b;
857 
858   /* Update the required alignment of partition A to account for B.  */
859   if (stack_vars[a].alignb < stack_vars[b].alignb)
860     stack_vars[a].alignb = stack_vars[b].alignb;
861 
862   /* Update the interference graph and merge the conflicts.  */
863   if (vb->conflicts)
864     {
865       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
866 	add_stack_var_conflict (a, stack_vars[u].representative);
867       BITMAP_FREE (vb->conflicts);
868     }
869 }
870 
871 /* Return true if the current function should have its stack frame
872    protected by address sanitizer.  */
873 
874 static inline bool
asan_sanitize_stack_p(void)875 asan_sanitize_stack_p (void)
876 {
877   return ((flag_sanitize & SANITIZE_ADDRESS)
878 	  && ASAN_STACK
879 	  && !lookup_attribute ("no_sanitize_address",
880 				DECL_ATTRIBUTES (current_function_decl)));
881 }
882 
883 /* A subroutine of expand_used_vars.  Binpack the variables into
884    partitions constrained by the interference graph.  The overall
885    algorithm used is as follows:
886 
887 	Sort the objects by size in descending order.
888 	For each object A {
889 	  S = size(A)
890 	  O = 0
891 	  loop {
892 	    Look for the largest non-conflicting object B with size <= S.
893 	    UNION (A, B)
894 	  }
895 	}
896 */
897 
898 static void
partition_stack_vars(void)899 partition_stack_vars (void)
900 {
901   size_t si, sj, n = stack_vars_num;
902 
903   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
904   for (si = 0; si < n; ++si)
905     stack_vars_sorted[si] = si;
906 
907   if (n == 1)
908     return;
909 
910   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
911 
912   for (si = 0; si < n; ++si)
913     {
914       size_t i = stack_vars_sorted[si];
915       unsigned int ialign = stack_vars[i].alignb;
916       HOST_WIDE_INT isize = stack_vars[i].size;
917 
918       /* Ignore objects that aren't partition representatives. If we
919          see a var that is not a partition representative, it must
920          have been merged earlier.  */
921       if (stack_vars[i].representative != i)
922         continue;
923 
924       for (sj = si + 1; sj < n; ++sj)
925 	{
926 	  size_t j = stack_vars_sorted[sj];
927 	  unsigned int jalign = stack_vars[j].alignb;
928 	  HOST_WIDE_INT jsize = stack_vars[j].size;
929 
930 	  /* Ignore objects that aren't partition representatives.  */
931 	  if (stack_vars[j].representative != j)
932 	    continue;
933 
934 	  /* Do not mix objects of "small" (supported) alignment
935 	     and "large" (unsupported) alignment.  */
936 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
938 	    break;
939 
940 	  /* For Address Sanitizer do not mix objects with different
941 	     sizes, as the shorter vars wouldn't be adequately protected.
942 	     Don't do that for "large" (unsupported) alignment objects,
943 	     those aren't protected anyway.  */
944 	  if (asan_sanitize_stack_p () && isize != jsize
945 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
946 	    break;
947 
948 	  /* Ignore conflicting objects.  */
949 	  if (stack_var_conflict_p (i, j))
950 	    continue;
951 
952 	  /* UNION the objects, placing J at OFFSET.  */
953 	  union_stack_vars (i, j);
954 	}
955     }
956 
957   update_alias_info_with_stack_vars ();
958 }
959 
960 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
961 
962 static void
dump_stack_var_partition(void)963 dump_stack_var_partition (void)
964 {
965   size_t si, i, j, n = stack_vars_num;
966 
967   for (si = 0; si < n; ++si)
968     {
969       i = stack_vars_sorted[si];
970 
971       /* Skip variables that aren't partition representatives, for now.  */
972       if (stack_vars[i].representative != i)
973 	continue;
974 
975       fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
976 	       " align %u\n", (unsigned long) i, stack_vars[i].size,
977 	       stack_vars[i].alignb);
978 
979       for (j = i; j != EOC; j = stack_vars[j].next)
980 	{
981 	  fputc ('\t', dump_file);
982 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
983 	}
984       fputc ('\n', dump_file);
985     }
986 }
987 
988 /* Assign rtl to DECL at BASE + OFFSET.  */
989 
990 static void
expand_one_stack_var_at(tree decl,rtx base,unsigned base_align,HOST_WIDE_INT offset)991 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
992 			 HOST_WIDE_INT offset)
993 {
994   unsigned align;
995   rtx x;
996 
997   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
998   gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
999 
1000   x = plus_constant (Pmode, base, offset);
1001   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1002 		   ? TYPE_MODE (TREE_TYPE (decl))
1003 		   : DECL_MODE (SSAVAR (decl)), x);
1004 
1005   if (TREE_CODE (decl) != SSA_NAME)
1006     {
1007       /* Set alignment we actually gave this decl if it isn't an SSA name.
1008          If it is we generate stack slots only accidentally so it isn't as
1009 	 important, we'll simply use the alignment that is already set.  */
1010       if (base == virtual_stack_vars_rtx)
1011 	offset -= frame_phase;
1012       align = offset & -offset;
1013       align *= BITS_PER_UNIT;
1014       if (align == 0 || align > base_align)
1015 	align = base_align;
1016 
1017       /* One would think that we could assert that we're not decreasing
1018 	 alignment here, but (at least) the i386 port does exactly this
1019 	 via the MINIMUM_ALIGNMENT hook.  */
1020 
1021       DECL_ALIGN (decl) = align;
1022       DECL_USER_ALIGN (decl) = 0;
1023     }
1024 
1025   set_rtl (decl, x);
1026 }
1027 
1028 struct stack_vars_data
1029 {
1030   /* Vector of offset pairs, always end of some padding followed
1031      by start of the padding that needs Address Sanitizer protection.
1032      The vector is in reversed, highest offset pairs come first.  */
1033   vec<HOST_WIDE_INT> asan_vec;
1034 
1035   /* Vector of partition representative decls in between the paddings.  */
1036   vec<tree> asan_decl_vec;
1037 
1038   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1039   rtx asan_base;
1040 
1041   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1042   unsigned int asan_alignb;
1043 };
1044 
1045 /* A subroutine of expand_used_vars.  Give each partition representative
1046    a unique location within the stack frame.  Update each partition member
1047    with that location.  */
1048 
1049 static void
expand_stack_vars(bool (* pred)(size_t),struct stack_vars_data * data)1050 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1051 {
1052   size_t si, i, j, n = stack_vars_num;
1053   HOST_WIDE_INT large_size = 0, large_alloc = 0;
1054   rtx large_base = NULL;
1055   unsigned large_align = 0;
1056   tree decl;
1057 
1058   /* Determine if there are any variables requiring "large" alignment.
1059      Since these are dynamically allocated, we only process these if
1060      no predicate involved.  */
1061   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1062   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1063     {
1064       /* Find the total size of these variables.  */
1065       for (si = 0; si < n; ++si)
1066 	{
1067 	  unsigned alignb;
1068 
1069 	  i = stack_vars_sorted[si];
1070 	  alignb = stack_vars[i].alignb;
1071 
1072 	  /* All "large" alignment decls come before all "small" alignment
1073 	     decls, but "large" alignment decls are not sorted based on
1074 	     their alignment.  Increase large_align to track the largest
1075 	     required alignment.  */
1076 	  if ((alignb * BITS_PER_UNIT) > large_align)
1077 	    large_align = alignb * BITS_PER_UNIT;
1078 
1079 	  /* Stop when we get to the first decl with "small" alignment.  */
1080 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1081 	    break;
1082 
1083 	  /* Skip variables that aren't partition representatives.  */
1084 	  if (stack_vars[i].representative != i)
1085 	    continue;
1086 
1087 	  /* Skip variables that have already had rtl assigned.  See also
1088 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1089 	  decl = stack_vars[i].decl;
1090 	  if (TREE_CODE (decl) == SSA_NAME
1091 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1092 	      : DECL_RTL (decl) != pc_rtx)
1093 	    continue;
1094 
1095 	  large_size += alignb - 1;
1096 	  large_size &= -(HOST_WIDE_INT)alignb;
1097 	  large_size += stack_vars[i].size;
1098 	}
1099 
1100       /* If there were any, allocate space.  */
1101       if (large_size > 0)
1102 	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1103 						   large_align, true);
1104     }
1105 
1106   for (si = 0; si < n; ++si)
1107     {
1108       rtx base;
1109       unsigned base_align, alignb;
1110       HOST_WIDE_INT offset;
1111 
1112       i = stack_vars_sorted[si];
1113 
1114       /* Skip variables that aren't partition representatives, for now.  */
1115       if (stack_vars[i].representative != i)
1116 	continue;
1117 
1118       /* Skip variables that have already had rtl assigned.  See also
1119 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1120       decl = stack_vars[i].decl;
1121       if (TREE_CODE (decl) == SSA_NAME
1122 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1123 	  : DECL_RTL (decl) != pc_rtx)
1124 	continue;
1125 
1126       /* Check the predicate to see whether this variable should be
1127 	 allocated in this pass.  */
1128       if (pred && !pred (i))
1129 	continue;
1130 
1131       alignb = stack_vars[i].alignb;
1132       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1133 	{
1134 	  base = virtual_stack_vars_rtx;
1135 	  if (asan_sanitize_stack_p () && pred)
1136 	    {
1137 	      HOST_WIDE_INT prev_offset
1138 		= align_base (frame_offset,
1139 			      MAX (alignb, ASAN_RED_ZONE_SIZE),
1140 			      !FRAME_GROWS_DOWNWARD);
1141 	      tree repr_decl = NULL_TREE;
1142 	      offset
1143 		= alloc_stack_frame_space (stack_vars[i].size
1144 					   + ASAN_RED_ZONE_SIZE,
1145 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1146 
1147 	      data->asan_vec.safe_push (prev_offset);
1148 	      data->asan_vec.safe_push (offset + stack_vars[i].size);
1149 	      /* Find best representative of the partition.
1150 		 Prefer those with DECL_NAME, even better
1151 		 satisfying asan_protect_stack_decl predicate.  */
1152 	      for (j = i; j != EOC; j = stack_vars[j].next)
1153 		if (asan_protect_stack_decl (stack_vars[j].decl)
1154 		    && DECL_NAME (stack_vars[j].decl))
1155 		  {
1156 		    repr_decl = stack_vars[j].decl;
1157 		    break;
1158 		  }
1159 		else if (repr_decl == NULL_TREE
1160 			 && DECL_P (stack_vars[j].decl)
1161 			 && DECL_NAME (stack_vars[j].decl))
1162 		  repr_decl = stack_vars[j].decl;
1163 	      if (repr_decl == NULL_TREE)
1164 		repr_decl = stack_vars[i].decl;
1165 	      data->asan_decl_vec.safe_push (repr_decl);
1166 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1167 	      if (data->asan_base == NULL)
1168 		data->asan_base = gen_reg_rtx (Pmode);
1169 	      base = data->asan_base;
1170 
1171 	      if (!STRICT_ALIGNMENT)
1172 		base_align = crtl->max_used_stack_slot_alignment;
1173 	      else
1174 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1175 				  GET_MODE_ALIGNMENT (SImode)
1176 				  << ASAN_SHADOW_SHIFT);
1177 	    }
1178 	  else
1179 	    {
1180 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1181 	      base_align = crtl->max_used_stack_slot_alignment;
1182 	    }
1183 	}
1184       else
1185 	{
1186 	  /* Large alignment is only processed in the last pass.  */
1187 	  if (pred)
1188 	    continue;
1189 	  gcc_assert (large_base != NULL);
1190 
1191 	  large_alloc += alignb - 1;
1192 	  large_alloc &= -(HOST_WIDE_INT)alignb;
1193 	  offset = large_alloc;
1194 	  large_alloc += stack_vars[i].size;
1195 
1196 	  base = large_base;
1197 	  base_align = large_align;
1198 	}
1199 
1200       /* Create rtl for each variable based on their location within the
1201 	 partition.  */
1202       for (j = i; j != EOC; j = stack_vars[j].next)
1203 	{
1204 	  expand_one_stack_var_at (stack_vars[j].decl,
1205 				   base, base_align,
1206 				   offset);
1207 	}
1208     }
1209 
1210   gcc_assert (large_alloc == large_size);
1211 }
1212 
1213 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1214 static HOST_WIDE_INT
account_stack_vars(void)1215 account_stack_vars (void)
1216 {
1217   size_t si, j, i, n = stack_vars_num;
1218   HOST_WIDE_INT size = 0;
1219 
1220   for (si = 0; si < n; ++si)
1221     {
1222       i = stack_vars_sorted[si];
1223 
1224       /* Skip variables that aren't partition representatives, for now.  */
1225       if (stack_vars[i].representative != i)
1226 	continue;
1227 
1228       size += stack_vars[i].size;
1229       for (j = i; j != EOC; j = stack_vars[j].next)
1230 	set_rtl (stack_vars[j].decl, NULL);
1231     }
1232   return size;
1233 }
1234 
1235 /* Record the RTL assignment X for the default def of PARM.  */
1236 
1237 extern void
set_parm_rtl(tree parm,rtx x)1238 set_parm_rtl (tree parm, rtx x)
1239 {
1240   gcc_assert (TREE_CODE (parm) == PARM_DECL
1241 	      || TREE_CODE (parm) == RESULT_DECL);
1242 
1243   if (x && !MEM_P (x))
1244     {
1245       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1246 					      TYPE_MODE (TREE_TYPE (parm)),
1247 					      TYPE_ALIGN (TREE_TYPE (parm)));
1248 
1249       /* If the variable alignment is very large we'll dynamicaly
1250 	 allocate it, which means that in-frame portion is just a
1251 	 pointer.  ??? We've got a pseudo for sure here, do we
1252 	 actually dynamically allocate its spilling area if needed?
1253 	 ??? Isn't it a problem when POINTER_SIZE also exceeds
1254 	 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
1255       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1256 	align = POINTER_SIZE;
1257 
1258       record_alignment_for_reg_var (align);
1259     }
1260 
1261   tree ssa = ssa_default_def (cfun, parm);
1262   if (!ssa)
1263     return set_rtl (parm, x);
1264 
1265   int part = var_to_partition (SA.map, ssa);
1266   gcc_assert (part != NO_PARTITION);
1267 
1268   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1269   gcc_assert (changed);
1270 
1271   set_rtl (ssa, x);
1272   gcc_assert (DECL_RTL (parm) == x);
1273 }
1274 
1275 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1276    to a variable to be allocated in the stack frame.  */
1277 
1278 static void
expand_one_stack_var_1(tree var)1279 expand_one_stack_var_1 (tree var)
1280 {
1281   HOST_WIDE_INT size, offset;
1282   unsigned byte_align;
1283 
1284   if (TREE_CODE (var) == SSA_NAME)
1285     {
1286       tree type = TREE_TYPE (var);
1287       size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1288       byte_align = TYPE_ALIGN_UNIT (type);
1289     }
1290   else
1291     {
1292       size = tree_to_uhwi (DECL_SIZE_UNIT (var));
1293       byte_align = align_local_variable (var);
1294     }
1295 
1296   /* We handle highly aligned variables in expand_stack_vars.  */
1297   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1298 
1299   offset = alloc_stack_frame_space (size, byte_align);
1300 
1301   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1302 			   crtl->max_used_stack_slot_alignment, offset);
1303 }
1304 
1305 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1306    already assigned some MEM.  */
1307 
1308 static void
expand_one_stack_var(tree var)1309 expand_one_stack_var (tree var)
1310 {
1311   if (TREE_CODE (var) == SSA_NAME)
1312     {
1313       int part = var_to_partition (SA.map, var);
1314       if (part != NO_PARTITION)
1315 	{
1316 	  rtx x = SA.partition_to_pseudo[part];
1317 	  gcc_assert (x);
1318 	  gcc_assert (MEM_P (x));
1319 	  return;
1320 	}
1321     }
1322 
1323   return expand_one_stack_var_1 (var);
1324 }
1325 
1326 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1327    that will reside in a hard register.  */
1328 
1329 static void
expand_one_hard_reg_var(tree var)1330 expand_one_hard_reg_var (tree var)
1331 {
1332   rest_of_decl_compilation (var, 0, 0);
1333 }
1334 
1335 /* Record the alignment requirements of some variable assigned to a
1336    pseudo.  */
1337 
1338 static void
record_alignment_for_reg_var(unsigned int align)1339 record_alignment_for_reg_var (unsigned int align)
1340 {
1341   if (SUPPORTS_STACK_ALIGNMENT
1342       && crtl->stack_alignment_estimated < align)
1343     {
1344       /* stack_alignment_estimated shouldn't change after stack
1345          realign decision made */
1346       gcc_assert (!crtl->stack_realign_processed);
1347       crtl->stack_alignment_estimated = align;
1348     }
1349 
1350   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1351      So here we only make sure stack_alignment_needed >= align.  */
1352   if (crtl->stack_alignment_needed < align)
1353     crtl->stack_alignment_needed = align;
1354   if (crtl->max_used_stack_slot_alignment < align)
1355     crtl->max_used_stack_slot_alignment = align;
1356 }
1357 
1358 /* Create RTL for an SSA partition.  */
1359 
1360 static void
expand_one_ssa_partition(tree var)1361 expand_one_ssa_partition (tree var)
1362 {
1363   int part = var_to_partition (SA.map, var);
1364   gcc_assert (part != NO_PARTITION);
1365 
1366   if (SA.partition_to_pseudo[part])
1367     return;
1368 
1369   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1370 					  TYPE_MODE (TREE_TYPE (var)),
1371 					  TYPE_ALIGN (TREE_TYPE (var)));
1372 
1373   /* If the variable alignment is very large we'll dynamicaly allocate
1374      it, which means that in-frame portion is just a pointer.  */
1375   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1376     align = POINTER_SIZE;
1377 
1378   record_alignment_for_reg_var (align);
1379 
1380   if (!use_register_for_decl (var))
1381     {
1382       if (defer_stack_allocation (var, true))
1383 	add_stack_var (var);
1384       else
1385 	expand_one_stack_var_1 (var);
1386       return;
1387     }
1388 
1389   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1390 
1391   rtx x = gen_reg_rtx (reg_mode);
1392 
1393   set_rtl (var, x);
1394 }
1395 
1396 /* Record the association between the RTL generated for partition PART
1397    and the underlying variable of the SSA_NAME VAR.  */
1398 
1399 static void
adjust_one_expanded_partition_var(tree var)1400 adjust_one_expanded_partition_var (tree var)
1401 {
1402   if (!var)
1403     return;
1404 
1405   tree decl = SSA_NAME_VAR (var);
1406 
1407   int part = var_to_partition (SA.map, var);
1408   if (part == NO_PARTITION)
1409     return;
1410 
1411   rtx x = SA.partition_to_pseudo[part];
1412 
1413   gcc_assert (x);
1414 
1415   set_rtl (var, x);
1416 
1417   if (!REG_P (x))
1418     return;
1419 
1420   /* Note if the object is a user variable.  */
1421   if (decl && !DECL_ARTIFICIAL (decl))
1422     mark_user_reg (x);
1423 
1424   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1425     mark_reg_pointer (x, get_pointer_alignment (var));
1426 }
1427 
1428 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1429    that will reside in a pseudo register.  */
1430 
1431 static void
expand_one_register_var(tree var)1432 expand_one_register_var (tree var)
1433 {
1434   if (TREE_CODE (var) == SSA_NAME)
1435     {
1436       int part = var_to_partition (SA.map, var);
1437       if (part != NO_PARTITION)
1438 	{
1439 	  rtx x = SA.partition_to_pseudo[part];
1440 	  gcc_assert (x);
1441 	  gcc_assert (REG_P (x));
1442 	  return;
1443 	}
1444       gcc_unreachable ();
1445     }
1446 
1447   tree decl = var;
1448   tree type = TREE_TYPE (decl);
1449   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1450   rtx x = gen_reg_rtx (reg_mode);
1451 
1452   set_rtl (var, x);
1453 
1454   /* Note if the object is a user variable.  */
1455   if (!DECL_ARTIFICIAL (decl))
1456     mark_user_reg (x);
1457 
1458   if (POINTER_TYPE_P (type))
1459     mark_reg_pointer (x, get_pointer_alignment (var));
1460 }
1461 
1462 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1463    has some associated error, e.g. its type is error-mark.  We just need
1464    to pick something that won't crash the rest of the compiler.  */
1465 
1466 static void
expand_one_error_var(tree var)1467 expand_one_error_var (tree var)
1468 {
1469   machine_mode mode = DECL_MODE (var);
1470   rtx x;
1471 
1472   if (mode == BLKmode)
1473     x = gen_rtx_MEM (BLKmode, const0_rtx);
1474   else if (mode == VOIDmode)
1475     x = const0_rtx;
1476   else
1477     x = gen_reg_rtx (mode);
1478 
1479   SET_DECL_RTL (var, x);
1480 }
1481 
1482 /* A subroutine of expand_one_var.  VAR is a variable that will be
1483    allocated to the local stack frame.  Return true if we wish to
1484    add VAR to STACK_VARS so that it will be coalesced with other
1485    variables.  Return false to allocate VAR immediately.
1486 
1487    This function is used to reduce the number of variables considered
1488    for coalescing, which reduces the size of the quadratic problem.  */
1489 
1490 static bool
defer_stack_allocation(tree var,bool toplevel)1491 defer_stack_allocation (tree var, bool toplevel)
1492 {
1493   tree size_unit = TREE_CODE (var) == SSA_NAME
1494     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1495     : DECL_SIZE_UNIT (var);
1496 
1497   /* Whether the variable is small enough for immediate allocation not to be
1498      a problem with regard to the frame size.  */
1499   bool smallish
1500     = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
1501        < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1502 
1503   /* If stack protection is enabled, *all* stack variables must be deferred,
1504      so that we can re-order the strings to the top of the frame.
1505      Similarly for Address Sanitizer.  */
1506   if (flag_stack_protect || asan_sanitize_stack_p ())
1507     return true;
1508 
1509   unsigned int align = TREE_CODE (var) == SSA_NAME
1510     ? TYPE_ALIGN (TREE_TYPE (var))
1511     : DECL_ALIGN (var);
1512 
1513   /* We handle "large" alignment via dynamic allocation.  We want to handle
1514      this extra complication in only one place, so defer them.  */
1515   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1516     return true;
1517 
1518   bool ignored = TREE_CODE (var) == SSA_NAME
1519     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1520     : DECL_IGNORED_P (var);
1521 
1522   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1523      might be detached from their block and appear at toplevel when we reach
1524      here.  We want to coalesce them with variables from other blocks when
1525      the immediate contribution to the frame size would be noticeable.  */
1526   if (toplevel && optimize > 0 && ignored && !smallish)
1527     return true;
1528 
1529   /* Variables declared in the outermost scope automatically conflict
1530      with every other variable.  The only reason to want to defer them
1531      at all is that, after sorting, we can more efficiently pack
1532      small variables in the stack frame.  Continue to defer at -O2.  */
1533   if (toplevel && optimize < 2)
1534     return false;
1535 
1536   /* Without optimization, *most* variables are allocated from the
1537      stack, which makes the quadratic problem large exactly when we
1538      want compilation to proceed as quickly as possible.  On the
1539      other hand, we don't want the function's stack frame size to
1540      get completely out of hand.  So we avoid adding scalars and
1541      "small" aggregates to the list at all.  */
1542   if (optimize == 0 && smallish)
1543     return false;
1544 
1545   return true;
1546 }
1547 
1548 /* A subroutine of expand_used_vars.  Expand one variable according to
1549    its flavor.  Variables to be placed on the stack are not actually
1550    expanded yet, merely recorded.
1551    When REALLY_EXPAND is false, only add stack values to be allocated.
1552    Return stack usage this variable is supposed to take.
1553 */
1554 
1555 static HOST_WIDE_INT
expand_one_var(tree var,bool toplevel,bool really_expand)1556 expand_one_var (tree var, bool toplevel, bool really_expand)
1557 {
1558   unsigned int align = BITS_PER_UNIT;
1559   tree origvar = var;
1560 
1561   var = SSAVAR (var);
1562 
1563   if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1564     {
1565       if (is_global_var (var))
1566 	return 0;
1567 
1568       /* Because we don't know if VAR will be in register or on stack,
1569 	 we conservatively assume it will be on stack even if VAR is
1570 	 eventually put into register after RA pass.  For non-automatic
1571 	 variables, which won't be on stack, we collect alignment of
1572 	 type and ignore user specified alignment.  Similarly for
1573 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1574       if (TREE_STATIC (var)
1575 	  || DECL_EXTERNAL (var)
1576 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1577 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1578 				   TYPE_MODE (TREE_TYPE (var)),
1579 				   TYPE_ALIGN (TREE_TYPE (var)));
1580       else if (DECL_HAS_VALUE_EXPR_P (var)
1581 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1582 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1583 	   or variables which were assigned a stack slot already by
1584 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1585 	   changed from the offset chosen to it.  */
1586 	align = crtl->stack_alignment_estimated;
1587       else
1588 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1589 
1590       /* If the variable alignment is very large we'll dynamicaly allocate
1591 	 it, which means that in-frame portion is just a pointer.  */
1592       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1593 	align = POINTER_SIZE;
1594     }
1595 
1596   record_alignment_for_reg_var (align);
1597 
1598   if (TREE_CODE (origvar) == SSA_NAME)
1599     {
1600       gcc_assert (TREE_CODE (var) != VAR_DECL
1601 		  || (!DECL_EXTERNAL (var)
1602 		      && !DECL_HAS_VALUE_EXPR_P (var)
1603 		      && !TREE_STATIC (var)
1604 		      && TREE_TYPE (var) != error_mark_node
1605 		      && !DECL_HARD_REGISTER (var)
1606 		      && really_expand));
1607     }
1608   if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1609     ;
1610   else if (DECL_EXTERNAL (var))
1611     ;
1612   else if (DECL_HAS_VALUE_EXPR_P (var))
1613     ;
1614   else if (TREE_STATIC (var))
1615     ;
1616   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1617     ;
1618   else if (TREE_TYPE (var) == error_mark_node)
1619     {
1620       if (really_expand)
1621         expand_one_error_var (var);
1622     }
1623   else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1624     {
1625       if (really_expand)
1626 	{
1627 	  expand_one_hard_reg_var (var);
1628 	  if (!DECL_HARD_REGISTER (var))
1629 	    /* Invalid register specification.  */
1630 	    expand_one_error_var (var);
1631 	}
1632     }
1633   else if (use_register_for_decl (var))
1634     {
1635       if (really_expand)
1636         expand_one_register_var (origvar);
1637     }
1638   else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1639     {
1640       /* Reject variables which cover more than half of the address-space.  */
1641       if (really_expand)
1642 	{
1643 	  error ("size of variable %q+D is too large", var);
1644 	  expand_one_error_var (var);
1645 	}
1646     }
1647   else if (defer_stack_allocation (var, toplevel))
1648     add_stack_var (origvar);
1649   else
1650     {
1651       if (really_expand)
1652         {
1653           if (lookup_attribute ("naked",
1654                                 DECL_ATTRIBUTES (current_function_decl)))
1655             error ("cannot allocate stack for variable %q+D, naked function",
1656                    var);
1657 
1658           expand_one_stack_var (origvar);
1659         }
1660 
1661 
1662       return tree_to_uhwi (DECL_SIZE_UNIT (var));
1663     }
1664   return 0;
1665 }
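
/* A rough illustration of the cascade above (a sketch, not an exhaustive
   list of cases; the asm register name is hypothetical and target
   dependent):

       void
       f (void)
       {
	 static int s;			// is_global_var: contributes 0
	 register int r asm ("r12");	// DECL_HARD_REGISTER: expand_one_hard_reg_var
	 int i;				// use_register_for_decl: gets a pseudo
	 char buf[64];			// stack: expand_one_stack_var or add_stack_var
       }

   Only BUF ends up occupying frame space; it is either returned directly
   as 64 bytes or recorded via add_stack_var and counted later by
   account_stack_vars.  */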
1666 
1667 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1668    expanding variables.  Those variables that can be put into registers
1669    are allocated pseudos; those that can't are put on the stack.
1670 
1671    TOPLEVEL is true if this is the outermost BLOCK.  */
1672 
1673 static void
1674 expand_used_vars_for_block (tree block, bool toplevel)
1675 {
1676   tree t;
1677 
1678   /* Expand all variables at this level.  */
1679   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1680     if (TREE_USED (t)
1681         && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1682 	    || !DECL_NONSHAREABLE (t)))
1683       expand_one_var (t, toplevel, true);
1684 
1685   /* Expand all variables at containing levels.  */
1686   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1687     expand_used_vars_for_block (t, false);
1688 }
1689 
1690 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1691    and clear TREE_USED on all local variables.  */
1692 
1693 static void
1694 clear_tree_used (tree block)
1695 {
1696   tree t;
1697 
1698   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1699     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1700     if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1701 	|| !DECL_NONSHAREABLE (t))
1702       TREE_USED (t) = 0;
1703 
1704   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1705     clear_tree_used (t);
1706 }
1707 
1708 enum {
1709   SPCT_FLAG_DEFAULT = 1,
1710   SPCT_FLAG_ALL = 2,
1711   SPCT_FLAG_STRONG = 3,
1712   SPCT_FLAG_EXPLICIT = 4
1713 };
1714 
1715 /* Examine TYPE and determine a bit mask of the following features.  */
1716 
1717 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1718 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1719 #define SPCT_HAS_ARRAY			4
1720 #define SPCT_HAS_AGGREGATE		8
1721 
1722 static unsigned int
1723 stack_protect_classify_type (tree type)
1724 {
1725   unsigned int ret = 0;
1726   tree t;
1727 
1728   switch (TREE_CODE (type))
1729     {
1730     case ARRAY_TYPE:
1731       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1732       if (t == char_type_node
1733 	  || t == signed_char_type_node
1734 	  || t == unsigned_char_type_node)
1735 	{
1736 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1737 	  unsigned HOST_WIDE_INT len;
1738 
1739 	  if (!TYPE_SIZE_UNIT (type)
1740 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1741 	    len = max;
1742 	  else
1743 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1744 
1745 	  if (len < max)
1746 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1747 	  else
1748 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1749 	}
1750       else
1751 	ret = SPCT_HAS_ARRAY;
1752       break;
1753 
1754     case UNION_TYPE:
1755     case QUAL_UNION_TYPE:
1756     case RECORD_TYPE:
1757       ret = SPCT_HAS_AGGREGATE;
1758       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1759 	if (TREE_CODE (t) == FIELD_DECL)
1760 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1761       break;
1762 
1763     default:
1764       break;
1765     }
1766 
1767   return ret;
1768 }
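
/* For example (a sketch; the boundary depends on PARAM_SSP_BUFFER_SIZE,
   which defaults to 8):

     char small[4];		-> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];		-> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int ints[4];		-> SPCT_HAS_ARRAY
     struct { char b[64]; } s;	-> SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
				   | SPCT_HAS_ARRAY  */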
1769 
1770 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1771    part of the local stack frame.  Remember if we ever return nonzero for
1772    any variable in this function.  The return value is the phase number in
1773    which the variable should be allocated.  */
1774 
1775 static int
1776 stack_protect_decl_phase (tree decl)
1777 {
1778   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1779   int ret = 0;
1780 
1781   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1782     has_short_buffer = true;
1783 
1784   if (flag_stack_protect == SPCT_FLAG_ALL
1785       || flag_stack_protect == SPCT_FLAG_STRONG
1786       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1787 	  && lookup_attribute ("stack_protect",
1788 			       DECL_ATTRIBUTES (current_function_decl))))
1789     {
1790       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1791 	  && !(bits & SPCT_HAS_AGGREGATE))
1792 	ret = 1;
1793       else if (bits & SPCT_HAS_ARRAY)
1794 	ret = 2;
1795     }
1796   else
1797     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1798 
1799   if (ret)
1800     has_protected_decls = true;
1801 
1802   return ret;
1803 }
1804 
1805 /* Two helper routines that check for phase 1 and phase 2.  These are used
1806    as callbacks for expand_stack_vars.  */
1807 
1808 static bool
1809 stack_protect_decl_phase_1 (size_t i)
1810 {
1811   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1812 }
1813 
1814 static bool
1815 stack_protect_decl_phase_2 (size_t i)
1816 {
1817   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1818 }
1819 
1820 /* A helper function that checks for the asan phase (with the stack
1821    protector it is phase 3).  This is used as a callback for expand_stack_vars.
1822    Returns true if any of the vars in the partition need to be protected.  */
1823 
1824 static bool
1825 asan_decl_phase_3 (size_t i)
1826 {
1827   while (i != EOC)
1828     {
1829       if (asan_protect_stack_decl (stack_vars[i].decl))
1830 	return true;
1831       i = stack_vars[i].next;
1832     }
1833   return false;
1834 }
1835 
1836 /* Ensure that variables in different stack protection phases conflict
1837    so that they are not merged and share the same stack slot.  */
1838 
1839 static void
1840 add_stack_protection_conflicts (void)
1841 {
1842   size_t i, j, n = stack_vars_num;
1843   unsigned char *phase;
1844 
1845   phase = XNEWVEC (unsigned char, n);
1846   for (i = 0; i < n; ++i)
1847     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1848 
1849   for (i = 0; i < n; ++i)
1850     {
1851       unsigned char ph_i = phase[i];
1852       for (j = i + 1; j < n; ++j)
1853 	if (ph_i != phase[j])
1854 	  add_stack_var_conflict (i, j);
1855     }
1856 
1857   XDELETEVEC (phase);
1858 }
1859 
1860 /* Create a decl for the guard at the top of the stack frame.  */
1861 
1862 static void
1863 create_stack_guard (void)
1864 {
1865   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1866 			   VAR_DECL, NULL, ptr_type_node);
1867   TREE_THIS_VOLATILE (guard) = 1;
1868   TREE_USED (guard) = 1;
1869   expand_one_stack_var (guard);
1870   crtl->stack_protect_guard = guard;
1871 }
1872 
1873 /* Prepare for expanding variables.  */
1874 static void
1875 init_vars_expansion (void)
1876 {
1877   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1878   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1879 
1880   /* A map from decl to stack partition.  */
1881   decl_to_stack_part = new hash_map<tree, size_t>;
1882 
1883   /* Initialize local stack smashing state.  */
1884   has_protected_decls = false;
1885   has_short_buffer = false;
1886 }
1887 
1888 /* Free up stack variable graph data.  */
1889 static void
1890 fini_vars_expansion (void)
1891 {
1892   bitmap_obstack_release (&stack_var_bitmap_obstack);
1893   if (stack_vars)
1894     XDELETEVEC (stack_vars);
1895   if (stack_vars_sorted)
1896     XDELETEVEC (stack_vars_sorted);
1897   stack_vars = NULL;
1898   stack_vars_sorted = NULL;
1899   stack_vars_alloc = stack_vars_num = 0;
1900   delete decl_to_stack_part;
1901   decl_to_stack_part = NULL;
1902 }
1903 
1904 /* Make a fair guess for the size of the stack frame of the function
1905    in NODE.  This doesn't have to be exact, the result is only used in
1906    the inline heuristics.  So we don't want to run the full stack var
1907    packing algorithm (which is quadratic in the number of stack vars).
1908    Instead, we calculate the total size of all stack vars.  This turns
1909    out to be a pretty fair estimate -- packing of stack vars doesn't
1910    happen very often.  */
1911 
1912 HOST_WIDE_INT
1913 estimated_stack_frame_size (struct cgraph_node *node)
1914 {
1915   HOST_WIDE_INT size = 0;
1916   size_t i;
1917   tree var;
1918   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1919 
1920   push_cfun (fn);
1921 
1922   init_vars_expansion ();
1923 
1924   FOR_EACH_LOCAL_DECL (fn, i, var)
1925     if (auto_var_in_fn_p (var, fn->decl))
1926       size += expand_one_var (var, true, false);
1927 
1928   if (stack_vars_num > 0)
1929     {
1930       /* Fake sorting the stack vars for account_stack_vars ().  */
1931       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1932       for (i = 0; i < stack_vars_num; ++i)
1933 	stack_vars_sorted[i] = i;
1934       size += account_stack_vars ();
1935     }
1936 
1937   fini_vars_expansion ();
1938   pop_cfun ();
1939   return size;
1940 }
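
/* As a rough worked example (a sketch that ignores padding and alignment):
   for a function whose only locals are

     char buf[100];
     int v[8];

   the loop above accumulates about 100 + 32 = 132 bytes, either directly
   from expand_one_var or via account_stack_vars; the real frame may end up
   smaller once the packing pass shares slots.  */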
1941 
1942 /* Helper routine to check if a record or union contains an array field. */
1943 
1944 static int
1945 record_or_union_type_has_array_p (const_tree tree_type)
1946 {
1947   tree fields = TYPE_FIELDS (tree_type);
1948   tree f;
1949 
1950   for (f = fields; f; f = DECL_CHAIN (f))
1951     if (TREE_CODE (f) == FIELD_DECL)
1952       {
1953 	tree field_type = TREE_TYPE (f);
1954 	if (RECORD_OR_UNION_TYPE_P (field_type)
1955 	    && record_or_union_type_has_array_p (field_type))
1956 	  return 1;
1957 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1958 	  return 1;
1959       }
1960   return 0;
1961 }
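
/* For instance (a sketch):

     struct no_array  { int a; long b; };		-> 0
     struct has_array { int a; char tail[16]; };	-> 1
     struct nested    { struct has_array h; };		-> 1 (found recursively)  */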
1962 
1963 /* Check if the current function has local referenced variables that
1964    have their addresses taken, contain an array, or are arrays.  */
1965 
1966 static bool
1967 stack_protect_decl_p ()
1968 {
1969   unsigned i;
1970   tree var;
1971 
1972   FOR_EACH_LOCAL_DECL (cfun, i, var)
1973     if (!is_global_var (var))
1974       {
1975 	tree var_type = TREE_TYPE (var);
1976 	if (TREE_CODE (var) == VAR_DECL
1977 	    && (TREE_CODE (var_type) == ARRAY_TYPE
1978 		|| TREE_ADDRESSABLE (var)
1979 		|| (RECORD_OR_UNION_TYPE_P (var_type)
1980 		    && record_or_union_type_has_array_p (var_type))))
1981 	  return true;
1982       }
1983   return false;
1984 }
1985 
1986 /* Check if the current function has calls that use a return slot.  */
1987 
1988 static bool
1989 stack_protect_return_slot_p ()
1990 {
1991   basic_block bb;
1992 
1993   FOR_ALL_BB_FN (bb, cfun)
1994     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1995 	 !gsi_end_p (gsi); gsi_next (&gsi))
1996       {
1997 	gimple *stmt = gsi_stmt (gsi);
1998 	/* This assumes that calls to internal-only functions never
1999 	   use a return slot.  */
2000 	if (is_gimple_call (stmt)
2001 	    && !gimple_call_internal_p (stmt)
2002 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2003 				  gimple_call_fndecl (stmt)))
2004 	  return true;
2005       }
2006   return false;
2007 }
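
/* A call "uses a return slot" when its value is returned in memory, e.g.
   (a sketch; whether aggregate_value_p is true depends on the target ABI):

     struct big { char data[128]; };
     struct big get_big (void);
     void f (void) { struct big b = get_big (); }

   On typical targets the call to GET_BIG returns in memory, so the
   function above returns true and -fstack-protector-strong adds a guard.  */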
2008 
2009 /* Expand all variables used in the function.  */
2010 
2011 static rtx_insn *
2012 expand_used_vars (void)
2013 {
2014   tree var, outer_block = DECL_INITIAL (current_function_decl);
2015   vec<tree> maybe_local_decls = vNULL;
2016   rtx_insn *var_end_seq = NULL;
2017   unsigned i;
2018   unsigned len;
2019   bool gen_stack_protect_signal = false;
2020 
2021   /* Compute the phase of the stack frame for this function.  */
2022   {
2023     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2024     int off = STARTING_FRAME_OFFSET % align;
2025     frame_phase = off ? align - off : 0;
2026   }
2027 
2028   /* Set TREE_USED on all variables in the local_decls.  */
2029   FOR_EACH_LOCAL_DECL (cfun, i, var)
2030     TREE_USED (var) = 1;
2031   /* Clear TREE_USED on all variables associated with a block scope.  */
2032   clear_tree_used (DECL_INITIAL (current_function_decl));
2033 
2034   init_vars_expansion ();
2035 
2036   if (targetm.use_pseudo_pic_reg ())
2037     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2038 
2039   for (i = 0; i < SA.map->num_partitions; i++)
2040     {
2041       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2042 	continue;
2043 
2044       tree var = partition_to_var (SA.map, i);
2045 
2046       gcc_assert (!virtual_operand_p (var));
2047 
2048       expand_one_ssa_partition (var);
2049     }
2050 
2051   if (flag_stack_protect == SPCT_FLAG_STRONG)
2052       gen_stack_protect_signal
2053 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2054 
2055   /* At this point all variables on the local_decls with TREE_USED
2056      set are not associated with any block scope.  Lay them out.  */
2057 
2058   len = vec_safe_length (cfun->local_decls);
2059   FOR_EACH_LOCAL_DECL (cfun, i, var)
2060     {
2061       bool expand_now = false;
2062 
2063       /* Expanded above already.  */
2064       if (is_gimple_reg (var))
2065 	{
2066 	  TREE_USED (var) = 0;
2067 	  goto next;
2068 	}
2069       /* We didn't set a block for static or extern because it's hard
2070 	 to tell the difference between a global variable (re)declared
2071 	 in a local scope, and one that's really declared there to
2072 	 begin with.  And it doesn't really matter much, since we're
2073 	 not giving them stack space.  Expand them now.  */
2074       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2075 	expand_now = true;
2076 
2077       /* Expand variables not associated with any block now.  Those created by
2078 	 the optimizers could be live anywhere in the function.  Those that
2079 	 could possibly have been scoped originally and detached from their
2080 	 block will have their allocation deferred so we coalesce them with
2081 	 others when optimization is enabled.  */
2082       else if (TREE_USED (var))
2083 	expand_now = true;
2084 
2085       /* Finally, mark all variables on the list as used.  We'll use
2086 	 this in a moment when we expand those associated with scopes.  */
2087       TREE_USED (var) = 1;
2088 
2089       if (expand_now)
2090 	expand_one_var (var, true, true);
2091 
2092     next:
2093       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2094 	{
2095 	  rtx rtl = DECL_RTL_IF_SET (var);
2096 
2097 	  /* Keep artificial non-ignored vars in cfun->local_decls
2098 	     chain until instantiate_decls.  */
2099 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2100 	    add_local_decl (cfun, var);
2101 	  else if (rtl == NULL_RTX)
2102 	    /* If rtl isn't set yet, which can happen e.g. with
2103 	       -fstack-protector, retry before returning from this
2104 	       function.  */
2105 	    maybe_local_decls.safe_push (var);
2106 	}
2107     }
2108 
2109   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2110 
2111      +-----------------+-----------------+
2112      | ...processed... | ...duplicates...|
2113      +-----------------+-----------------+
2114                        ^
2115 		       +-- LEN points here.
2116 
2117      We just want the duplicates, as those are the artificial
2118      non-ignored vars that we want to keep until instantiate_decls.
2119      Move them down and truncate the array.  */
2120   if (!vec_safe_is_empty (cfun->local_decls))
2121     cfun->local_decls->block_remove (0, len);
2122 
2123   /* At this point, all variables within the block tree with TREE_USED
2124      set are actually used by the optimized function.  Lay them out.  */
2125   expand_used_vars_for_block (outer_block, true);
2126 
2127   if (stack_vars_num > 0)
2128     {
2129       add_scope_conflicts ();
2130 
2131       /* If stack protection is enabled, we don't share space between
2132 	 vulnerable data and non-vulnerable data.  */
2133       if (flag_stack_protect != 0
2134 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2135 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2136 		  && lookup_attribute ("stack_protect",
2137 				       DECL_ATTRIBUTES (current_function_decl)))))
2138 	add_stack_protection_conflicts ();
2139 
2140       /* Now that we have collected all stack variables, and have computed a
2141 	 minimal interference graph, attempt to save some stack space.  */
2142       partition_stack_vars ();
2143       if (dump_file)
2144 	dump_stack_var_partition ();
2145     }
2146 
2147   switch (flag_stack_protect)
2148     {
2149     case SPCT_FLAG_ALL:
2150       create_stack_guard ();
2151       break;
2152 
2153     case SPCT_FLAG_STRONG:
2154       if (gen_stack_protect_signal
2155 	  || cfun->calls_alloca || has_protected_decls
2156 	  || lookup_attribute ("stack_protect",
2157 			       DECL_ATTRIBUTES (current_function_decl)))
2158 	create_stack_guard ();
2159       break;
2160 
2161     case SPCT_FLAG_DEFAULT:
2162       if (cfun->calls_alloca || has_protected_decls
2163 	  || lookup_attribute ("stack_protect",
2164 			       DECL_ATTRIBUTES (current_function_decl)))
2165 	create_stack_guard ();
2166       break;
2167 
2168     case SPCT_FLAG_EXPLICIT:
2169       if (lookup_attribute ("stack_protect",
2170 			    DECL_ATTRIBUTES (current_function_decl)))
2171 	create_stack_guard ();
2172       break;
2173     default:
2174       ;
2175     }
2176 
2177   /* Assign rtl to each variable based on these partitions.  */
2178   if (stack_vars_num > 0)
2179     {
2180       struct stack_vars_data data;
2181 
2182       data.asan_vec = vNULL;
2183       data.asan_decl_vec = vNULL;
2184       data.asan_base = NULL_RTX;
2185       data.asan_alignb = 0;
2186 
2187       /* Reorder decls to be protected by iterating over the variables
2188 	 array multiple times, and allocating out of each phase in turn.  */
2189       /* ??? We could probably integrate this into the qsort we did
2190 	 earlier, such that we naturally see these variables first,
2191 	 and thus naturally allocate things in the right order.  */
2192       if (has_protected_decls)
2193 	{
2194 	  /* Phase 1 contains only character arrays.  */
2195 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2196 
2197 	  /* Phase 2 contains other kinds of arrays.  */
2198 	  if (flag_stack_protect == SPCT_FLAG_ALL
2199 	      || flag_stack_protect == SPCT_FLAG_STRONG
2200 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2201 		  && lookup_attribute ("stack_protect",
2202 				       DECL_ATTRIBUTES (current_function_decl))))
2203 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2204 	}
2205 
2206       if (asan_sanitize_stack_p ())
2207 	/* Phase 3, any partitions that need asan protection
2208 	   in addition to phase 1 and 2.  */
2209 	expand_stack_vars (asan_decl_phase_3, &data);
2210 
2211       if (!data.asan_vec.is_empty ())
2212 	{
2213 	  HOST_WIDE_INT prev_offset = frame_offset;
2214 	  HOST_WIDE_INT offset, sz, redzonesz;
2215 	  redzonesz = ASAN_RED_ZONE_SIZE;
2216 	  sz = data.asan_vec[0] - prev_offset;
2217 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2218 	      && data.asan_alignb <= 4096
2219 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2220 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2221 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2222 	  offset
2223 	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2224 	  data.asan_vec.safe_push (prev_offset);
2225 	  data.asan_vec.safe_push (offset);
2226 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2227 	  if (STRICT_ALIGNMENT)
2228 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2229 				      << ASAN_SHADOW_SHIFT)
2230 				     / BITS_PER_UNIT, 1);
2231 
2232 	  var_end_seq
2233 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2234 					  data.asan_base,
2235 					  data.asan_alignb,
2236 					  data.asan_vec.address (),
2237 					  data.asan_decl_vec.address (),
2238 					  data.asan_vec.length ());
2239 	}
2240 
2241       expand_stack_vars (NULL, &data);
2242 
2243       data.asan_vec.release ();
2244       data.asan_decl_vec.release ();
2245     }
2246 
2247   fini_vars_expansion ();
2248 
2249   /* If there were any artificial non-ignored vars without rtl
2250      found earlier, see if deferred stack allocation hasn't assigned
2251      rtl to them.  */
2252   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2253     {
2254       rtx rtl = DECL_RTL_IF_SET (var);
2255 
2256       /* Keep artificial non-ignored vars in cfun->local_decls
2257 	 chain until instantiate_decls.  */
2258       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2259 	add_local_decl (cfun, var);
2260     }
2261   maybe_local_decls.release ();
2262 
2263   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2264   if (STACK_ALIGNMENT_NEEDED)
2265     {
2266       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2267       if (!FRAME_GROWS_DOWNWARD)
2268 	frame_offset += align - 1;
2269       frame_offset &= -align;
2270     }
2271 
2272   return var_end_seq;
2273 }
2274 
2275 
2276 /* If we need to produce a detailed dump, print the tree representation
2277    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2278    generated for STMT should have been appended.  */
2279 
2280 static void
2281 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2282 {
2283   if (dump_file && (dump_flags & TDF_DETAILS))
2284     {
2285       fprintf (dump_file, "\n;; ");
2286       print_gimple_stmt (dump_file, stmt, 0,
2287 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2288       fprintf (dump_file, "\n");
2289 
2290       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2291     }
2292 }
2293 
2294 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2295 
2296 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2297 
2298 /* Returns the label_rtx expression for a label starting basic block BB.  */
2299 
2300 static rtx_code_label *
2301 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2302 {
2303   gimple_stmt_iterator gsi;
2304   tree lab;
2305 
2306   if (bb->flags & BB_RTL)
2307     return block_label (bb);
2308 
2309   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2310   if (elt)
2311     return *elt;
2312 
2313   /* Find the tree label if it is present.  */
2314 
2315   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2316     {
2317       glabel *lab_stmt;
2318 
2319       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2320       if (!lab_stmt)
2321 	break;
2322 
2323       lab = gimple_label_label (lab_stmt);
2324       if (DECL_NONLOCAL (lab))
2325 	break;
2326 
2327       return jump_target_rtx (lab);
2328     }
2329 
2330   rtx_code_label *l = gen_label_rtx ();
2331   lab_rtx_for_bb->put (bb, l);
2332   return l;
2333 }
2334 
2335 
2336 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2337    of a basic block where we just expanded the conditional at the end,
2338    possibly clean up the CFG and instruction sequence.  LAST is the
2339    last instruction before the just emitted jump sequence.  */
2340 
2341 static void
2342 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2343 {
2344   /* Special case: when jumpif decides that the condition is
2345      trivial it emits an unconditional jump (and the necessary
2346      barrier).  But we still have two edges, the fallthru one is
2347      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2348      we have to insert insns (and split edges) before
2349      find_many_sub_basic_blocks and hence before purge_dead_edges.
2350      But splitting edges might create new blocks which depend on the
2351      fact that if there are two edges there's no barrier.  So the
2352      barrier would get lost and verify_flow_info would ICE.  Instead
2353      of auditing all edge splitters to care for the barrier (which
2354      normally isn't there in a cleaned CFG), fix it here.  */
2355   if (BARRIER_P (get_last_insn ()))
2356     {
2357       rtx_insn *insn;
2358       remove_edge (e);
2359       /* Now, we have a single successor block, if we have insns to
2360 	 insert on the remaining edge we potentially will insert
2361 	 it at the end of this block (if the dest block isn't feasible)
2362 	 in order to avoid splitting the edge.  This insertion will take
2363 	 place in front of the last jump.  But we might have emitted
2364 	 multiple jumps (conditional and one unconditional) to the
2365 	 same destination.  Inserting in front of the last one then
2366 	 is a problem.  See PR 40021.  We fix this by deleting all
2367 	 jumps except the last unconditional one.  */
2368       insn = PREV_INSN (get_last_insn ());
2369       /* Make sure we have an unconditional jump.  Otherwise we're
2370 	 confused.  */
2371       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2372       for (insn = PREV_INSN (insn); insn != last;)
2373 	{
2374 	  insn = PREV_INSN (insn);
2375 	  if (JUMP_P (NEXT_INSN (insn)))
2376 	    {
2377 	      if (!any_condjump_p (NEXT_INSN (insn)))
2378 		{
2379 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2380 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2381 		}
2382 	      delete_insn (NEXT_INSN (insn));
2383 	    }
2384 	}
2385     }
2386 }
2387 
2388 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2389    Returns a new basic block if we've terminated the current basic
2390    block and created a new one.  */
2391 
2392 static basic_block
2393 expand_gimple_cond (basic_block bb, gcond *stmt)
2394 {
2395   basic_block new_bb, dest;
2396   edge new_edge;
2397   edge true_edge;
2398   edge false_edge;
2399   rtx_insn *last2, *last;
2400   enum tree_code code;
2401   tree op0, op1;
2402 
2403   code = gimple_cond_code (stmt);
2404   op0 = gimple_cond_lhs (stmt);
2405   op1 = gimple_cond_rhs (stmt);
2406   /* We're sometimes presented with such code:
2407        D.123_1 = x < y;
2408        if (D.123_1 != 0)
2409          ...
2410      This would expand to two comparisons which then later might
2411      be cleaned up by combine.  But some pattern matchers like if-conversion
2412      work better when there's only one compare, so make up for this
2413      here as a special exception if TER would have made the same change.  */
2414   if (SA.values
2415       && TREE_CODE (op0) == SSA_NAME
2416       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2417       && TREE_CODE (op1) == INTEGER_CST
2418       && ((gimple_cond_code (stmt) == NE_EXPR
2419 	   && integer_zerop (op1))
2420 	  || (gimple_cond_code (stmt) == EQ_EXPR
2421 	      && integer_onep (op1)))
2422       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2423     {
2424       gimple *second = SSA_NAME_DEF_STMT (op0);
2425       if (gimple_code (second) == GIMPLE_ASSIGN)
2426 	{
2427 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2428 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2429 	    {
2430 	      code = code2;
2431 	      op0 = gimple_assign_rhs1 (second);
2432 	      op1 = gimple_assign_rhs2 (second);
2433 	    }
2434 	  /* If jumps are cheap and the target does not support conditional
2435 	     compare, turn some more codes into jumpy sequences.  */
2436 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2437 		   && targetm.gen_ccmp_first == NULL)
2438 	    {
2439 	      if ((code2 == BIT_AND_EXPR
2440 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2441 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2442 		  || code2 == TRUTH_AND_EXPR)
2443 		{
2444 		  code = TRUTH_ANDIF_EXPR;
2445 		  op0 = gimple_assign_rhs1 (second);
2446 		  op1 = gimple_assign_rhs2 (second);
2447 		}
2448 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2449 		{
2450 		  code = TRUTH_ORIF_EXPR;
2451 		  op0 = gimple_assign_rhs1 (second);
2452 		  op1 = gimple_assign_rhs2 (second);
2453 		}
2454 	    }
2455 	}
2456     }
2457 
2458   last2 = last = get_last_insn ();
2459 
2460   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2461   set_curr_insn_location (gimple_location (stmt));
2462 
2463   /* These flags have no purpose in RTL land.  */
2464   true_edge->flags &= ~EDGE_TRUE_VALUE;
2465   false_edge->flags &= ~EDGE_FALSE_VALUE;
2466 
2467   /* We can either have a pure conditional jump with one fallthru edge or
2468      two-way jump that needs to be decomposed into two basic blocks.  */
2469   if (false_edge->dest == bb->next_bb)
2470     {
2471       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2472 		true_edge->probability);
2473       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2474       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2475 	set_curr_insn_location (true_edge->goto_locus);
2476       false_edge->flags |= EDGE_FALLTHRU;
2477       maybe_cleanup_end_of_block (false_edge, last);
2478       return NULL;
2479     }
2480   if (true_edge->dest == bb->next_bb)
2481     {
2482       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2483 		   false_edge->probability);
2484       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2485       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2486 	set_curr_insn_location (false_edge->goto_locus);
2487       true_edge->flags |= EDGE_FALLTHRU;
2488       maybe_cleanup_end_of_block (true_edge, last);
2489       return NULL;
2490     }
2491 
2492   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2493 	    true_edge->probability);
2494   last = get_last_insn ();
2495   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2496     set_curr_insn_location (false_edge->goto_locus);
2497   emit_jump (label_rtx_for_bb (false_edge->dest));
2498 
2499   BB_END (bb) = last;
2500   if (BARRIER_P (BB_END (bb)))
2501     BB_END (bb) = PREV_INSN (BB_END (bb));
2502   update_bb_for_insn (bb);
2503 
2504   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2505   dest = false_edge->dest;
2506   redirect_edge_succ (false_edge, new_bb);
2507   false_edge->flags |= EDGE_FALLTHRU;
2508   new_bb->count = false_edge->count;
2509   new_bb->frequency = EDGE_FREQUENCY (false_edge);
2510   add_bb_to_loop (new_bb, bb->loop_father);
2511   new_edge = make_edge (new_bb, dest, 0);
2512   new_edge->probability = REG_BR_PROB_BASE;
2513   new_edge->count = new_bb->count;
2514   if (BARRIER_P (BB_END (new_bb)))
2515     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2516   update_bb_for_insn (new_bb);
2517 
2518   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2519 
2520   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2521     {
2522       set_curr_insn_location (true_edge->goto_locus);
2523       true_edge->goto_locus = curr_insn_location ();
2524     }
2525 
2526   return new_bb;
2527 }
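
/* An illustration of the jumpy-sequence special case above (a sketch):
   given GIMPLE such as

     _1 = a_2 & b_3;   // all of boolean type, _1 replaceable by TER
     if (_1 != 0) goto <L1>; else goto <L2>;

   on a target with cheap branches and no ccmp support, the condition is
   rewritten as TRUTH_ANDIF_EXPR (a_2, b_3), so jumpif_1 emits two
   conditional jumps instead of materializing _1 first.  */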
2528 
2529 /* Mark all calls that can have a transaction restart.  */
2530 
2531 static void
2532 mark_transaction_restart_calls (gimple *stmt)
2533 {
2534   struct tm_restart_node dummy;
2535   tm_restart_node **slot;
2536 
2537   if (!cfun->gimple_df->tm_restart)
2538     return;
2539 
2540   dummy.stmt = stmt;
2541   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2542   if (slot)
2543     {
2544       struct tm_restart_node *n = *slot;
2545       tree list = n->label_or_list;
2546       rtx_insn *insn;
2547 
2548       for (insn = next_real_insn (get_last_insn ());
2549 	   !CALL_P (insn);
2550 	   insn = next_real_insn (insn))
2551 	continue;
2552 
2553       if (TREE_CODE (list) == LABEL_DECL)
2554 	add_reg_note (insn, REG_TM, label_rtx (list));
2555       else
2556 	for (; list ; list = TREE_CHAIN (list))
2557 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2558     }
2559 }
2560 
2561 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2562    statement STMT.  */
2563 
2564 static void
2565 expand_call_stmt (gcall *stmt)
2566 {
2567   tree exp, decl, lhs;
2568   bool builtin_p;
2569   size_t i;
2570 
2571   if (gimple_call_internal_p (stmt))
2572     {
2573       expand_internal_call (stmt);
2574       return;
2575     }
2576 
2577   /* If this is a call to a built-in function and it has no effect other
2578      than setting the lhs, try to implement it using an internal function
2579      instead.  */
2580   decl = gimple_call_fndecl (stmt);
2581   if (gimple_call_lhs (stmt)
2582       && !gimple_has_side_effects (stmt)
2583       && (optimize || (decl && called_as_built_in (decl))))
2584     {
2585       internal_fn ifn = replacement_internal_fn (stmt);
2586       if (ifn != IFN_LAST)
2587 	{
2588 	  expand_internal_call (ifn, stmt);
2589 	  return;
2590 	}
2591     }
2592 
2593   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2594 
2595   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2596   builtin_p = decl && DECL_BUILT_IN (decl);
2597 
2598   /* If this is not a builtin function, the function type through which the
2599      call is made may be different from the type of the function.  */
2600   if (!builtin_p)
2601     CALL_EXPR_FN (exp)
2602       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2603 		      CALL_EXPR_FN (exp));
2604 
2605   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2606   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2607 
2608   for (i = 0; i < gimple_call_num_args (stmt); i++)
2609     {
2610       tree arg = gimple_call_arg (stmt, i);
2611       gimple *def;
2612       /* TER substitutes addresses into the arguments of builtin functions
2613 	 so we have a chance to infer more correct alignment information.  See PR39954.  */
2614       if (builtin_p
2615 	  && TREE_CODE (arg) == SSA_NAME
2616 	  && (def = get_gimple_for_ssa_name (arg))
2617 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2618 	arg = gimple_assign_rhs1 (def);
2619       CALL_EXPR_ARG (exp, i) = arg;
2620     }
2621 
2622   if (gimple_has_side_effects (stmt))
2623     TREE_SIDE_EFFECTS (exp) = 1;
2624 
2625   if (gimple_call_nothrow_p (stmt))
2626     TREE_NOTHROW (exp) = 1;
2627 
2628   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2629   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2630   if (decl
2631       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2632       && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2633 	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2634     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2635   else
2636     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2637   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2638   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2639   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2640 
2641   /* Ensure RTL is created for debug args.  */
2642   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2643     {
2644       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2645       unsigned int ix;
2646       tree dtemp;
2647 
2648       if (debug_args)
2649 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2650 	  {
2651 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2652 	    expand_debug_expr (dtemp);
2653 	  }
2654     }
2655 
2656   lhs = gimple_call_lhs (stmt);
2657   if (lhs)
2658     expand_assignment (lhs, exp, false);
2659   else
2660     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2661 
2662   mark_transaction_restart_calls (stmt);
2663 }
2664 
2665 
2666 /* Generate RTL for an asm statement (explicit assembler code).
2667    STRING is a STRING_CST node containing the assembler code text,
2668    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2669    insn is volatile; don't optimize it.  */
2670 
2671 static void
2672 expand_asm_loc (tree string, int vol, location_t locus)
2673 {
2674   rtx body;
2675 
2676   if (TREE_CODE (string) == ADDR_EXPR)
2677     string = TREE_OPERAND (string, 0);
2678 
2679   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2680 				ggc_strdup (TREE_STRING_POINTER (string)),
2681 				locus);
2682 
2683   MEM_VOLATILE_P (body) = vol;
2684 
2685   emit_insn (body);
2686 }
2687 
2688 /* Return the number of times character C occurs in string S.  */
2689 static int
2690 n_occurrences (int c, const char *s)
2691 {
2692   int n = 0;
2693   while (*s)
2694     n += (*s++ == c);
2695   return n;
2696 }
2697 
2698 /* A subroutine of expand_asm_operands.  Check that all operands have
2699    the same number of alternatives.  Return true if so.  */
2700 
2701 static bool
2702 check_operand_nalternatives (const vec<const char *> &constraints)
2703 {
2704   unsigned len = constraints.length();
2705   if (len > 0)
2706     {
2707       int nalternatives = n_occurrences (',', constraints[0]);
2708 
2709       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2710 	{
2711 	  error ("too many alternatives in %<asm%>");
2712 	  return false;
2713 	}
2714 
2715       for (unsigned i = 1; i < len; ++i)
2716 	if (n_occurrences (',', constraints[i]) != nalternatives)
2717 	  {
2718 	    error ("operand constraints for %<asm%> differ "
2719 		   "in number of alternatives");
2720 	    return false;
2721 	  }
2722     }
2723   return true;
2724 }
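
/* For example (a sketch): the constraint strings {"=r,m", "r,r"} agree,
   with two alternatives each, whereas {"=r,m", "r"} would be rejected
   with the "differ in number of alternatives" error above.  */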
2725 
2726 /* Check for overlap between registers marked in CLOBBERED_REGS and
2727    anything inappropriate in T.  Emit an error and return true if an
2728    overlap is found; return false if everything is OK.  */
2729 
2730 static bool
2731 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2732 {
2733   /* Conflicts between asm-declared register variables and the clobber
2734      list are not allowed.  */
2735   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2736 
2737   if (overlap)
2738     {
2739       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2740 	     DECL_NAME (overlap));
2741 
2742       /* Reset registerness to stop multiple errors emitted for a single
2743 	 variable.  */
2744       DECL_REGISTER (overlap) = 0;
2745       return true;
2746     }
2747 
2748   return false;
2749 }
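
/* For example (a sketch; the register name is hypothetical and target
   dependent):

     register int x asm ("r10");
     asm volatile ("" : : "r" (x) : "r10");

   Here X overlaps the clobber list, so the error above is emitted and
   DECL_REGISTER (x) is cleared to avoid duplicate diagnostics.  */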
2750 
2751 /* Generate RTL for an asm statement with arguments.
2752    STRING is the instruction template.
2753    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2754    Each output or input has an expression in the TREE_VALUE and
2755    a tree list in TREE_PURPOSE which in turn contains a constraint
2756    name in TREE_VALUE (or NULL_TREE) and a constraint string
2757    in TREE_PURPOSE.
2758    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2759    that is clobbered by this insn.
2760 
2761    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2762    should be the fallthru basic block of the asm goto.
2763 
2764    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2765    Some elements of OUTPUTS may be replaced with trees representing temporary
2766    values.  The caller should copy those temporary values to the originally
2767    specified lvalues.
2768 
2769    VOL nonzero means the insn is volatile; don't optimize it.  */
2770 
2771 static void
2772 expand_asm_stmt (gasm *stmt)
2773 {
2774   class save_input_location
2775   {
2776     location_t old;
2777 
2778   public:
2779     explicit save_input_location(location_t where)
2780     {
2781       old = input_location;
2782       input_location = where;
2783     }
2784 
2785     ~save_input_location()
2786     {
2787       input_location = old;
2788     }
2789   };
2790 
2791   location_t locus = gimple_location (stmt);
2792 
2793   if (gimple_asm_input_p (stmt))
2794     {
2795       const char *s = gimple_asm_string (stmt);
2796       tree string = build_string (strlen (s), s);
2797       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2798       return;
2799     }
2800 
2801   /* Setting input_location here keeps some legacy diagnostics working
2802      and also avoids adding a sixth parameter to targetm.md_asm_adjust.  */
2803   save_input_location s_i_l(locus);
2804 
2805   unsigned noutputs = gimple_asm_noutputs (stmt);
2806   unsigned ninputs = gimple_asm_ninputs (stmt);
2807   unsigned nlabels = gimple_asm_nlabels (stmt);
2808   unsigned i;
2809 
2810   /* ??? Diagnose during gimplification?  */
2811   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2812     {
2813       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2814       return;
2815     }
2816 
2817   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2818   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2819   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2820 
2821   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2822 
2823   output_tvec.safe_grow (noutputs);
2824   input_tvec.safe_grow (ninputs);
2825   constraints.safe_grow (noutputs + ninputs);
2826 
2827   for (i = 0; i < noutputs; ++i)
2828     {
2829       tree t = gimple_asm_output_op (stmt, i);
2830       output_tvec[i] = TREE_VALUE (t);
2831       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2832     }
2833   for (i = 0; i < ninputs; i++)
2834     {
2835       tree t = gimple_asm_input_op (stmt, i);
2836       input_tvec[i] = TREE_VALUE (t);
2837       constraints[i + noutputs]
2838 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2839     }
2840 
2841   /* ??? Diagnose during gimplification?  */
2842   if (! check_operand_nalternatives (constraints))
2843     return;
2844 
2845   /* Count the number of meaningful clobbered registers, ignoring what
2846      we would ignore later.  */
2847   auto_vec<rtx> clobber_rvec;
2848   HARD_REG_SET clobbered_regs;
2849   CLEAR_HARD_REG_SET (clobbered_regs);
2850 
2851   if (unsigned n = gimple_asm_nclobbers (stmt))
2852     {
2853       clobber_rvec.reserve (n);
2854       for (i = 0; i < n; i++)
2855 	{
2856 	  tree t = gimple_asm_clobber_op (stmt, i);
2857           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2858 	  int nregs, j;
2859 
2860 	  j = decode_reg_name_and_count (regname, &nregs);
2861 	  if (j < 0)
2862 	    {
2863 	      if (j == -2)
2864 		{
2865 		  /* ??? Diagnose during gimplification?  */
2866 		  error ("unknown register name %qs in %<asm%>", regname);
2867 		}
2868 	      else if (j == -4)
2869 		{
2870 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2871 		  clobber_rvec.safe_push (x);
2872 		}
2873 	      else
2874 		{
2875 		  /* Otherwise we should have -1 == empty string
2876 		     or -3 == cc, which is not a register.  */
2877 		  gcc_assert (j == -1 || j == -3);
2878 		}
2879 	    }
2880 	  else
2881 	    for (int reg = j; reg < j + nregs; reg++)
2882 	      {
2883 		/* Clobbering the PIC register is an error.  */
2884 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2885 		  {
2886 		    /* ??? Diagnose during gimplification?  */
2887 		    error ("PIC register clobbered by %qs in %<asm%>",
2888 			   regname);
2889 		    return;
2890 		  }
2891 
2892 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2893 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2894 		clobber_rvec.safe_push (x);
2895 	      }
2896 	}
2897     }
2898   unsigned nclobbers = clobber_rvec.length();
2899 
2900   /* First pass over inputs and outputs checks validity and sets
2901      mark_addressable if needed.  */
2902   /* ??? Diagnose during gimplification?  */
2903 
2904   for (i = 0; i < noutputs; ++i)
2905     {
2906       tree val = output_tvec[i];
2907       tree type = TREE_TYPE (val);
2908       const char *constraint;
2909       bool is_inout;
2910       bool allows_reg;
2911       bool allows_mem;
2912 
2913       /* Try to parse the output constraint.  If that fails, there's
2914 	 no point in going further.  */
2915       constraint = constraints[i];
2916       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2917 				    &allows_mem, &allows_reg, &is_inout))
2918 	return;
2919 
2920       if (! allows_reg
2921 	  && (allows_mem
2922 	      || is_inout
2923 	      || (DECL_P (val)
2924 		  && REG_P (DECL_RTL (val))
2925 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2926 	mark_addressable (val);
2927     }
2928 
2929   for (i = 0; i < ninputs; ++i)
2930     {
2931       bool allows_reg, allows_mem;
2932       const char *constraint;
2933 
2934       constraint = constraints[i + noutputs];
2935       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2936 				    constraints.address (),
2937 				    &allows_mem, &allows_reg))
2938 	return;
2939 
2940       if (! allows_reg && allows_mem)
2941 	mark_addressable (input_tvec[i]);
2942     }
2943 
2944   /* Second pass evaluates arguments.  */
2945 
2946   /* Make sure stack is consistent for asm goto.  */
2947   if (nlabels > 0)
2948     do_pending_stack_adjust ();
2949   int old_generating_concat_p = generating_concat_p;
2950 
2951   /* Vector of RTX's of evaluated output operands.  */
2952   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2953   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2954   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2955 
2956   output_rvec.safe_grow (noutputs);
2957 
2958   for (i = 0; i < noutputs; ++i)
2959     {
2960       tree val = output_tvec[i];
2961       tree type = TREE_TYPE (val);
2962       bool is_inout, allows_reg, allows_mem, ok;
2963       rtx op;
2964 
2965       ok = parse_output_constraint (&constraints[i], i, ninputs,
2966 				    noutputs, &allows_mem, &allows_reg,
2967 				    &is_inout);
2968       gcc_assert (ok);
2969 
2970       /* If an output operand is not a decl or indirect ref and our constraint
2971 	 allows a register, make a temporary to act as an intermediate.
2972 	 Make the asm insn write into that, then we will copy it to
2973 	 the real output operand.  Likewise for promoted variables.  */
2974 
2975       generating_concat_p = 0;
2976 
2977       if ((TREE_CODE (val) == INDIRECT_REF
2978 	   && allows_mem)
2979 	  || (DECL_P (val)
2980 	      && (allows_mem || REG_P (DECL_RTL (val)))
2981 	      && ! (REG_P (DECL_RTL (val))
2982 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2983 	  || ! allows_reg
2984 	  || is_inout)
2985 	{
2986 	  op = expand_expr (val, NULL_RTX, VOIDmode,
2987 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2988 	  if (MEM_P (op))
2989 	    op = validize_mem (op);
2990 
2991 	  if (! allows_reg && !MEM_P (op))
2992 	    error ("output number %d not directly addressable", i);
2993 	  if ((! allows_mem && MEM_P (op))
2994 	      || GET_CODE (op) == CONCAT)
2995 	    {
2996 	      rtx old_op = op;
2997 	      op = gen_reg_rtx (GET_MODE (op));
2998 
2999 	      generating_concat_p = old_generating_concat_p;
3000 
3001 	      if (is_inout)
3002 		emit_move_insn (op, old_op);
3003 
3004 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3005 	      emit_move_insn (old_op, op);
3006 	      after_rtl_seq = get_insns ();
3007 	      after_rtl_end = get_last_insn ();
3008 	      end_sequence ();
3009 	    }
3010 	}
3011       else
3012 	{
3013 	  op = assign_temp (type, 0, 1);
3014 	  op = validize_mem (op);
3015 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3016 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3017 
3018 	  generating_concat_p = old_generating_concat_p;
3019 
3020 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3021 	  expand_assignment (val, make_tree (type, op), false);
3022 	  after_rtl_seq = get_insns ();
3023 	  after_rtl_end = get_last_insn ();
3024 	  end_sequence ();
3025 	}
3026       output_rvec[i] = op;
3027 
3028       if (is_inout)
3029 	inout_opnum.safe_push (i);
3030     }
3031 
3032   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3033   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3034 
3035   input_rvec.safe_grow (ninputs);
3036   input_mode.safe_grow (ninputs);
3037 
3038   generating_concat_p = 0;
3039 
3040   for (i = 0; i < ninputs; ++i)
3041     {
3042       tree val = input_tvec[i];
3043       tree type = TREE_TYPE (val);
3044       bool allows_reg, allows_mem, ok;
3045       const char *constraint;
3046       rtx op;
3047 
3048       constraint = constraints[i + noutputs];
3049       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3050 				   constraints.address (),
3051 				   &allows_mem, &allows_reg);
3052       gcc_assert (ok);
3053 
3054       /* EXPAND_INITIALIZER will not generate code for valid initializer
3055 	 constants, but will still generate code for other types of operand.
3056 	 This is the behavior we want for constant constraints.  */
3057       op = expand_expr (val, NULL_RTX, VOIDmode,
3058 			allows_reg ? EXPAND_NORMAL
3059 			: allows_mem ? EXPAND_MEMORY
3060 			: EXPAND_INITIALIZER);
3061 
3062       /* Never pass a CONCAT to an ASM.  */
3063       if (GET_CODE (op) == CONCAT)
3064 	op = force_reg (GET_MODE (op), op);
3065       else if (MEM_P (op))
3066 	op = validize_mem (op);
3067 
3068       if (asm_operand_ok (op, constraint, NULL) <= 0)
3069 	{
3070 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3071 	    op = force_reg (TYPE_MODE (type), op);
3072 	  else if (!allows_mem)
3073 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3074 		     i + noutputs);
3075 	  else if (MEM_P (op))
3076 	    {
3077 	      /* We won't recognize either volatile memory or memory
3078 		 with a queued address as a valid memory_operand
3079 		 at this point.  Ignore it: clearly this *is* a memory.  */
3080 	    }
3081 	  else
3082 	    gcc_unreachable ();
3083 	}
3084       input_rvec[i] = op;
3085       input_mode[i] = TYPE_MODE (type);
3086     }
3087 
3088   /* For in-out operands, copy output rtx to input rtx.  */
3089   unsigned ninout = inout_opnum.length();
3090   for (i = 0; i < ninout; i++)
3091     {
3092       int j = inout_opnum[i];
3093       rtx o = output_rvec[j];
3094 
3095       input_rvec.safe_push (o);
3096       input_mode.safe_push (GET_MODE (o));
3097 
3098       char buffer[16];
3099       sprintf (buffer, "%d", j);
3100       constraints.safe_push (ggc_strdup (buffer));
3101     }
3102   ninputs += ninout;
3103 
3104   /* Sometimes we wish to automatically clobber registers across an asm.
3105      Case in point is when the i386 backend moved from cc0 to a hard reg --
3106      maintaining source-level compatibility means automatically clobbering
3107      the flags register.  */
3108   rtx_insn *after_md_seq = NULL;
3109   if (targetm.md_asm_adjust)
3110     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3111 					  constraints, clobber_rvec,
3112 					  clobbered_regs);
3113 
3114   /* Do not allow the hook to change the output and input count,
3115      lest it mess up the operand numbering.  */
3116   gcc_assert (output_rvec.length() == noutputs);
3117   gcc_assert (input_rvec.length() == ninputs);
3118   gcc_assert (constraints.length() == noutputs + ninputs);
3119 
3120   /* But it certainly can adjust the clobbers.  */
3121   nclobbers = clobber_rvec.length();
3122 
3123   /* Third pass checks for easy conflicts.  */
3124   /* ??? Why are we doing this on trees instead of rtx?  */
3125 
3126   bool clobber_conflict_found = 0;
3127   for (i = 0; i < noutputs; ++i)
3128     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3129 	clobber_conflict_found = 1;
3130   for (i = 0; i < ninputs - ninout; ++i)
3131     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3132 	clobber_conflict_found = 1;
3133 
3134   /* Make vectors for the expression-rtx, constraint strings,
3135      and named operands.  */
3136 
3137   rtvec argvec = rtvec_alloc (ninputs);
3138   rtvec constraintvec = rtvec_alloc (ninputs);
3139   rtvec labelvec = rtvec_alloc (nlabels);
3140 
3141   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3142 				    : GET_MODE (output_rvec[0])),
3143 				   ggc_strdup (gimple_asm_string (stmt)),
3144 				   empty_string, 0, argvec, constraintvec,
3145 				   labelvec, locus);
3146   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3147 
3148   for (i = 0; i < ninputs; ++i)
3149     {
3150       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3151       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3152 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3153 				 constraints[i + noutputs],
3154 				 locus);
3155     }
3156 
3157   /* Copy labels to the vector.  */
3158   rtx_code_label *fallthru_label = NULL;
3159   if (nlabels > 0)
3160     {
3161       basic_block fallthru_bb = NULL;
3162       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3163       if (fallthru)
3164 	fallthru_bb = fallthru->dest;
3165 
3166       for (i = 0; i < nlabels; ++i)
3167 	{
3168 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3169 	  rtx_insn *r;
3170 	  /* If asm goto has any labels in the fallthru basic block, use
3171 	     a label that we emit immediately after the asm goto.  Expansion
3172 	     may insert further instructions into the same basic block after
3173 	     asm goto and if we don't do this, insertion of instructions on
3174 	     the fallthru edge might misbehave.  See PR58670.  */
3175 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3176 	    {
3177 	      if (fallthru_label == NULL_RTX)
3178 	        fallthru_label = gen_label_rtx ();
3179 	      r = fallthru_label;
3180 	    }
3181 	  else
3182 	    r = label_rtx (label);
3183 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3184 	}
3185     }
3186 
3187   /* Now, for each output, construct an rtx
3188      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3189 			       ARGVEC CONSTRAINTS OPNAMES))
3190      If there is more than one, put them inside a PARALLEL.  */
3191 
3192   if (nlabels > 0 && nclobbers == 0)
3193     {
3194       gcc_assert (noutputs == 0);
3195       emit_jump_insn (body);
3196     }
3197   else if (noutputs == 0 && nclobbers == 0)
3198     {
3199       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3200       emit_insn (body);
3201     }
3202   else if (noutputs == 1 && nclobbers == 0)
3203     {
3204       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3205       emit_insn (gen_rtx_SET (output_rvec[0], body));
3206     }
3207   else
3208     {
3209       rtx obody = body;
3210       int num = noutputs;
3211 
3212       if (num == 0)
3213 	num = 1;
3214 
3215       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3216 
3217       /* For each output operand, store a SET.  */
3218       for (i = 0; i < noutputs; ++i)
3219 	{
3220 	  rtx src, o = output_rvec[i];
3221 	  if (i == 0)
3222 	    {
3223 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3224 	      src = obody;
3225 	    }
3226 	  else
3227 	    {
3228 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3229 					  ASM_OPERANDS_TEMPLATE (obody),
3230 					  constraints[i], i, argvec,
3231 					  constraintvec, labelvec, locus);
3232 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3233 	    }
3234 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3235 	}
3236 
3237       /* If there are no outputs (but there are some clobbers)
3238 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3239       if (i == 0)
3240 	XVECEXP (body, 0, i++) = obody;
3241 
3242       /* Store (clobber REG) for each clobbered register specified.  */
3243       for (unsigned j = 0; j < nclobbers; ++j)
3244 	{
3245 	  rtx clobbered_reg = clobber_rvec[j];
3246 
3247 	  /* Do a sanity check for overlap between the clobbers and the
3248 	     inputs and outputs that hasn't been handled.  Such overlap
3249 	     should have been detected and reported above.  */
3250 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3251 	    {
3252 	      /* We test the old body (obody) contents to avoid
3253 		 tripping over the under-construction body.  */
3254 	      for (unsigned k = 0; k < noutputs; ++k)
3255 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3256 		  internal_error ("asm clobber conflict with output operand");
3257 
3258 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3259 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3260 		  internal_error ("asm clobber conflict with input operand");
3261 	    }
3262 
3263 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3264 	}
3265 
3266       if (nlabels > 0)
3267 	emit_jump_insn (body);
3268       else
3269 	emit_insn (body);
3270     }
3271 
3272   generating_concat_p = old_generating_concat_p;
3273 
3274   if (fallthru_label)
3275     emit_label (fallthru_label);
3276 
3277   if (after_md_seq)
3278     emit_insn (after_md_seq);
3279   if (after_rtl_seq)
3280     emit_insn (after_rtl_seq);
3281 
3282   free_temp_slots ();
3283   crtl->has_asm_statement = 1;
3284 }
3285 
3286 /* Emit code to jump to the address
3287    specified by the pointer expression EXP.  */
3288 
3289 static void
3290 expand_computed_goto (tree exp)
3291 {
3292   rtx x = expand_normal (exp);
3293 
3294   do_pending_stack_adjust ();
3295   emit_indirect_jump (x);
3296 }
3297 
3298 /* Generate RTL code for a `goto' statement with target label LABEL.
3299    LABEL should be a LABEL_DECL tree node that was or will later be
3300    defined with `expand_label'.  */
3301 
3302 static void
3303 expand_goto (tree label)
3304 {
3305   if (flag_checking)
3306     {
3307       /* Check for a nonlocal goto to a containing function.  Should have
3308 	 gotten translated to __builtin_nonlocal_goto.  */
3309       tree context = decl_function_context (label);
3310       gcc_assert (!context || context == current_function_decl);
3311     }
3312 
3313   emit_jump (jump_target_rtx (label));
3314 }
3315 
3316 /* Output a return with no value.  */
3317 
3318 static void
3319 expand_null_return_1 (void)
3320 {
3321   clear_pending_stack_adjust ();
3322   do_pending_stack_adjust ();
3323   emit_jump (return_label);
3324 }
3325 
3326 /* Generate RTL to return from the current function, with no value.
3327    (That is, we do not do anything about returning any value.)  */
3328 
3329 void
3330 expand_null_return (void)
3331 {
3332   /* If this function was declared to return a value, but we are
3333      returning none, clobber the return registers so that they are not
3334      propagated live to the rest of the function.  */
3335   clobber_return_register ();
3336 
3337   expand_null_return_1 ();
3338 }
3339 
3340 /* Generate RTL to return from the current function, with value VAL.  */
3341 
3342 static void
3343 expand_value_return (rtx val)
3344 {
3345   /* Copy the value to the return location unless it's already there.  */
3346 
3347   tree decl = DECL_RESULT (current_function_decl);
3348   rtx return_reg = DECL_RTL (decl);
3349   if (return_reg != val)
3350     {
3351       tree funtype = TREE_TYPE (current_function_decl);
3352       tree type = TREE_TYPE (decl);
3353       int unsignedp = TYPE_UNSIGNED (type);
3354       machine_mode old_mode = DECL_MODE (decl);
3355       machine_mode mode;
3356       if (DECL_BY_REFERENCE (decl))
3357         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3358       else
3359         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3360 
3361       if (mode != old_mode)
3362 	val = convert_modes (mode, old_mode, val, unsignedp);
3363 
3364       if (GET_CODE (return_reg) == PARALLEL)
3365 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3366       else
3367 	emit_move_insn (return_reg, val);
3368     }
3369 
3370   expand_null_return_1 ();
3371 }
3372 
3373 /* Generate RTL to evaluate the expression RETVAL and return it
3374    from the current function.  */
3375 
3376 static void
3377 expand_return (tree retval, tree bounds)
3378 {
3379   rtx result_rtl;
3380   rtx val = 0;
3381   tree retval_rhs;
3382   rtx bounds_rtl;
3383 
3384   /* If function wants no value, give it none.  */
3385   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3386     {
3387       expand_normal (retval);
3388       expand_null_return ();
3389       return;
3390     }
3391 
3392   if (retval == error_mark_node)
3393     {
3394       /* Treat this like a return of no value from a function that
3395 	 returns a value.  */
3396       expand_null_return ();
3397       return;
3398     }
3399   else if ((TREE_CODE (retval) == MODIFY_EXPR
3400 	    || TREE_CODE (retval) == INIT_EXPR)
3401 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3402     retval_rhs = TREE_OPERAND (retval, 1);
3403   else
3404     retval_rhs = retval;
3405 
3406   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3407 
3408   /* Put the returned bounds in the right place.  */
3409   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3410   if (bounds_rtl)
3411     {
3412       rtx addr = NULL;
3413       rtx bnd = NULL;
3414 
3415       if (bounds && bounds != error_mark_node)
3416 	{
3417 	  bnd = expand_normal (bounds);
3418 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3419 	}
3420       else if (REG_P (bounds_rtl))
3421 	{
3422 	  if (bounds)
3423 	    bnd = chkp_expand_zero_bounds ();
3424 	  else
3425 	    {
3426 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3427 	      addr = gen_rtx_MEM (Pmode, addr);
3428 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3429 	    }
3430 
3431 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3432 	}
3433       else
3434 	{
3435 	  int n;
3436 
3437 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3438 
3439 	  if (bounds)
3440 	    bnd = chkp_expand_zero_bounds ();
3441 	  else
3442 	    {
3443 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3444 	      addr = gen_rtx_MEM (Pmode, addr);
3445 	    }
3446 
3447 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3448 	    {
3449 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3450 	      if (!bounds)
3451 		{
3452 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3453 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3454 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3455 		}
3456 	      targetm.calls.store_returned_bounds (slot, bnd);
3457 	    }
3458 	}
3459     }
3460   else if (chkp_function_instrumented_p (current_function_decl)
3461 	   && !BOUNDED_P (retval_rhs)
3462 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3463 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3464     {
3465       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3466       addr = gen_rtx_MEM (Pmode, addr);
3467 
3468       gcc_assert (MEM_P (result_rtl));
3469 
3470       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3471     }
3472 
3473   /* If we are returning the RESULT_DECL, then the value has already
3474      been stored into it, so we don't have to do anything special.  */
3475   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3476     expand_value_return (result_rtl);
3477 
3478   /* If the result is an aggregate that is being returned in one (or more)
3479      registers, load the registers here.  */
3480 
3481   else if (retval_rhs != 0
3482 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3483 	   && REG_P (result_rtl))
3484     {
3485       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3486       if (val)
3487 	{
3488 	  /* Use the mode of the result value on the return register.  */
3489 	  PUT_MODE (result_rtl, GET_MODE (val));
3490 	  expand_value_return (val);
3491 	}
3492       else
3493 	expand_null_return ();
3494     }
3495   else if (retval_rhs != 0
3496 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3497 	   && (REG_P (result_rtl)
3498 	       || (GET_CODE (result_rtl) == PARALLEL)))
3499     {
3500       /* Compute the return value into a temporary (usually a pseudo reg).  */
3501       val
3502 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3503       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3504       val = force_not_mem (val);
3505       expand_value_return (val);
3506     }
3507   else
3508     {
3509       /* No hard reg used; calculate value into hard return reg.  */
3510       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3511       expand_value_return (result_rtl);
3512     }
3513 }
3514 
3515 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3516    STMT that doesn't require special handling for outgoing edges.  That
3517    is, no tailcalls and no GIMPLE_COND.  */
3518 
3519 static void
3520 expand_gimple_stmt_1 (gimple *stmt)
3521 {
3522   tree op0;
3523 
3524   set_curr_insn_location (gimple_location (stmt));
3525 
3526   switch (gimple_code (stmt))
3527     {
3528     case GIMPLE_GOTO:
3529       op0 = gimple_goto_dest (stmt);
3530       if (TREE_CODE (op0) == LABEL_DECL)
3531 	expand_goto (op0);
3532       else
3533 	expand_computed_goto (op0);
3534       break;
3535     case GIMPLE_LABEL:
3536       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3537       break;
3538     case GIMPLE_NOP:
3539     case GIMPLE_PREDICT:
3540       break;
3541     case GIMPLE_SWITCH:
3542       expand_case (as_a <gswitch *> (stmt));
3543       break;
3544     case GIMPLE_ASM:
3545       expand_asm_stmt (as_a <gasm *> (stmt));
3546       break;
3547     case GIMPLE_CALL:
3548       expand_call_stmt (as_a <gcall *> (stmt));
3549       break;
3550 
3551     case GIMPLE_RETURN:
3552       {
3553 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3554 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3555 
3556 	if (op0 && op0 != error_mark_node)
3557 	  {
3558 	    tree result = DECL_RESULT (current_function_decl);
3559 
3560 	    /* Mark that we have a return statement with missing bounds.  */
3561 	    if (!bnd
3562 		&& chkp_function_instrumented_p (cfun->decl)
3563 		&& !DECL_P (op0))
3564 	      bnd = error_mark_node;
3565 
3566 	    /* If we are not returning the current function's RESULT_DECL,
3567 	       build an assignment to it.  */
3568 	    if (op0 != result)
3569 	      {
3570 		/* I believe that a function's RESULT_DECL is unique.  */
3571 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3572 
3573 		/* ??? We'd like to simply use expand_assignment here,
3574 		   but this fails if the value is of BLKmode but the return
3575 		   decl is a register.  expand_return has special handling
3576 		   for this combination, which eventually should move
3577 		   to common code.  See comments there.  Until then, let's
3578 		   build a modify expression :-/  */
3579 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3580 			      result, op0);
3581 	      }
3582 	  }
3583 
3584 	if (!op0)
3585 	  expand_null_return ();
3586 	else
3587 	  expand_return (op0, bnd);
3588       }
3589       break;
3590 
3591     case GIMPLE_ASSIGN:
3592       {
3593 	gassign *assign_stmt = as_a <gassign *> (stmt);
3594 	tree lhs = gimple_assign_lhs (assign_stmt);
3595 
3596 	/* Tree expand used to fiddle with |= and &= of two bitfield
3597 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3598 	   of binary assigns must be a gimple reg.  */
3599 
3600 	if (TREE_CODE (lhs) != SSA_NAME
3601 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3602 	       == GIMPLE_SINGLE_RHS)
3603 	  {
3604 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3605 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3606 			== GIMPLE_SINGLE_RHS);
3607 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3608 		/* Do not put locations on possibly shared trees.  */
3609 		&& !is_gimple_min_invariant (rhs))
3610 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3611 	    if (TREE_CLOBBER_P (rhs))
3612 	      /* This is a clobber to mark the going out of scope for
3613 		 this LHS.  */
3614 	      ;
3615 	    else
3616 	      expand_assignment (lhs, rhs,
3617 				 gimple_assign_nontemporal_move_p (
3618 				   assign_stmt));
3619 	  }
3620 	else
3621 	  {
3622 	    rtx target, temp;
3623 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3624 	    struct separate_ops ops;
3625 	    bool promoted = false;
3626 
3627 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3628 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3629 	      promoted = true;
3630 
3631 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3632 	    ops.type = TREE_TYPE (lhs);
3633 	    switch (get_gimple_rhs_class (ops.code))
3634 	      {
3635 		case GIMPLE_TERNARY_RHS:
3636 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3637 		  /* Fallthru */
3638 		case GIMPLE_BINARY_RHS:
3639 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3640 		  /* Fallthru */
3641 		case GIMPLE_UNARY_RHS:
3642 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3643 		  break;
3644 		default:
3645 		  gcc_unreachable ();
3646 	      }
3647 	    ops.location = gimple_location (stmt);
3648 
3649 	    /* If we want to use a nontemporal store, force the value into
3650 	       a register first.  If we store into a promoted register,
3651 	       don't expand directly to the target.  */
3652 	    temp = nontemporal || promoted ? NULL_RTX : target;
3653 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3654 				       EXPAND_NORMAL);
3655 
3656 	    if (temp == target)
3657 	      ;
3658 	    else if (promoted)
3659 	      {
3660 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3661 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3662 		   sure that we properly convert it.  */
3663 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3664 		  {
3665 		    temp = convert_modes (GET_MODE (target),
3666 					  TYPE_MODE (ops.type),
3667 					  temp, unsignedp);
3668 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3669 					  GET_MODE (target), temp, unsignedp);
3670 		  }
3671 
3672 		convert_move (SUBREG_REG (target), temp, unsignedp);
3673 	      }
3674 	    else if (nontemporal && emit_storent_insn (target, temp))
3675 	      ;
3676 	    else
3677 	      {
3678 		temp = force_operand (temp, target);
3679 		if (temp != target)
3680 		  emit_move_insn (target, temp);
3681 	      }
3682 	  }
3683       }
3684       break;
3685 
3686     default:
3687       gcc_unreachable ();
3688     }
3689 }
3690 
3691 /* Expand one gimple statement STMT and return the last RTL instruction
3692    before any of the newly generated ones.
3693 
3694    In addition to generating the necessary RTL instructions this also
3695    sets REG_EH_REGION notes if necessary and sets the current source
3696    location for diagnostics.  */
3697 
3698 static rtx_insn *
3699 expand_gimple_stmt (gimple *stmt)
3700 {
3701   location_t saved_location = input_location;
3702   rtx_insn *last = get_last_insn ();
3703   int lp_nr;
3704 
3705   gcc_assert (cfun);
3706 
3707   /* We need to save and restore the current source location so that errors
3708      discovered during expansion are emitted with the right location.  But
3709      it would be better if the diagnostic routines used the source location
3710      embedded in the tree nodes rather than globals.  */
3711   if (gimple_has_location (stmt))
3712     input_location = gimple_location (stmt);
3713 
3714   expand_gimple_stmt_1 (stmt);
3715 
3716   /* Free any temporaries used to evaluate this statement.  */
3717   free_temp_slots ();
3718 
3719   input_location = saved_location;
3720 
3721   /* Mark all insns that may trap.  */
3722   lp_nr = lookup_stmt_eh_lp (stmt);
3723   if (lp_nr)
3724     {
3725       rtx_insn *insn;
3726       for (insn = next_real_insn (last); insn;
3727 	   insn = next_real_insn (insn))
3728 	{
3729 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3730 	      /* If we want exceptions for non-call insns, any
3731 		 may_trap_p instruction may throw.  */
3732 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3733 	      && GET_CODE (PATTERN (insn)) != USE
3734 	      && insn_could_throw_p (insn))
3735 	    make_reg_eh_region_note (insn, 0, lp_nr);
3736 	}
3737     }
3738 
3739   return last;
3740 }
3741 
3742 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3743    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3744    generated a tail call (something that might be denied by the ABI
3745    rules governing the call; see calls.c).
3746 
3747    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3748    execution can still reach the rest of BB.  The case here is __builtin_sqrt,
3749    where the NaN result goes through the external function (with a
3750    tailcall) and the normal result happens via a sqrt instruction.  */
3751 
3752 static basic_block
3753 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3754 {
3755   rtx_insn *last2, *last;
3756   edge e;
3757   edge_iterator ei;
3758   int probability;
3759   gcov_type count;
3760 
3761   last2 = last = expand_gimple_stmt (stmt);
3762 
3763   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3764     if (CALL_P (last) && SIBLING_CALL_P (last))
3765       goto found;
3766 
3767   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3768 
3769   *can_fallthru = true;
3770   return NULL;
3771 
3772  found:
3773   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3774      Any instructions emitted here are about to be deleted.  */
3775   do_pending_stack_adjust ();
3776 
3777   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3778   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3779      EH or abnormal edges, we shouldn't have created a tail call in
3780      the first place.  So it seems to me we should just be removing
3781      all edges here, or redirecting the existing fallthru edge to
3782      the exit block.  */
3783 
3784   probability = 0;
3785   count = 0;
3786 
3787   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3788     {
3789       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3790 	{
3791 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3792 	    {
3793 	      e->dest->count -= e->count;
3794 	      e->dest->frequency -= EDGE_FREQUENCY (e);
3795 	      if (e->dest->count < 0)
3796 		e->dest->count = 0;
3797 	      if (e->dest->frequency < 0)
3798 		e->dest->frequency = 0;
3799 	    }
3800 	  count += e->count;
3801 	  probability += e->probability;
3802 	  remove_edge (e);
3803 	}
3804       else
3805 	ei_next (&ei);
3806     }
3807 
3808   /* This is somewhat ugly: the call_expr expander often emits instructions
3809      after the sibcall (to perform the function return).  These confuse the
3810      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3811   last = NEXT_INSN (last);
3812   gcc_assert (BARRIER_P (last));
3813 
3814   *can_fallthru = false;
3815   while (NEXT_INSN (last))
3816     {
3817       /* For instance, the sqrt builtin expander expands an `if' with a
3818 	 sibcall in the `then' arm and a label for the `else' arm.  */
3819       if (LABEL_P (NEXT_INSN (last)))
3820 	{
3821 	  *can_fallthru = true;
3822 	  break;
3823 	}
3824       delete_insn (NEXT_INSN (last));
3825     }
3826 
3827   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3828 		 | EDGE_SIBCALL);
3829   e->probability += probability;
3830   e->count += count;
3831   BB_END (bb) = last;
3832   update_bb_for_insn (bb);
3833 
3834   if (NEXT_INSN (last))
3835     {
3836       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3837 
3838       last = BB_END (bb);
3839       if (BARRIER_P (last))
3840 	BB_END (bb) = PREV_INSN (last);
3841     }
3842 
3843   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3844 
3845   return bb;
3846 }
3847 
3848 /* Return the difference between the floor and the truncated result of
3849    a signed division by OP1 with remainder MOD.  */
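/* As a concrete example of the adjustment computed below: for -7 / 2 the
   truncated quotient is -3 with MOD = -1; the floor result is -4, and since
   OP1 / MOD = 2 / -1 = -2 < 0 the expression evaluates to -1, which is
   exactly floor minus trunc.  */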
3850 static rtx
3851 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3852 {
3853   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3854   return gen_rtx_IF_THEN_ELSE
3855     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3856      gen_rtx_IF_THEN_ELSE
3857      (mode, gen_rtx_LT (BImode,
3858 			gen_rtx_DIV (mode, op1, mod),
3859 			const0_rtx),
3860       constm1_rtx, const0_rtx),
3861      const0_rtx);
3862 }
3863 
3864 /* Return the difference between the ceil and the truncated result of
3865    a signed division by OP1 with remainder MOD.  */
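/* As a concrete example of the adjustment computed below: for 7 / 2 the
   truncated quotient is 3 with MOD = 1; the ceiling result is 4, and since
   OP1 / MOD = 2 / 1 = 2 > 0 the expression evaluates to 1.  */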
3866 static rtx
3867 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3868 {
3869   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3870   return gen_rtx_IF_THEN_ELSE
3871     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3872      gen_rtx_IF_THEN_ELSE
3873      (mode, gen_rtx_GT (BImode,
3874 			gen_rtx_DIV (mode, op1, mod),
3875 			const0_rtx),
3876       const1_rtx, const0_rtx),
3877      const0_rtx);
3878 }
3879 
3880 /* Return the difference between the ceil and the truncated result of
3881    an unsigned division by OP1 with remainder MOD.  */
3882 static rtx
3883 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3884 {
3885   /* (mod != 0 ? 1 : 0) */
3886   return gen_rtx_IF_THEN_ELSE
3887     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3888      const1_rtx, const0_rtx);
3889 }
3890 
3891 /* Return the difference between the rounded and the truncated result
3892    of a signed division by OP1 with remainder MOD.  Halfway cases are
3893    rounded away from zero, rather than to the nearest even number.  */
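/* As a concrete example of the adjustment computed below: for 7 / 2 the
   truncated quotient is 3 with MOD = 1; abs (MOD) = 1 is not less than
   abs (OP1) - abs (MOD) = 1 and OP1 / MOD = 2 > 0, so the adjustment is 1,
   giving the round-half-away-from-zero result 4.  */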
3894 static rtx
3895 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3896 {
3897   /* (abs (mod) >= abs (op1) - abs (mod)
3898       ? (op1 / mod > 0 ? 1 : -1)
3899       : 0) */
3900   return gen_rtx_IF_THEN_ELSE
3901     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3902 		       gen_rtx_MINUS (mode,
3903 				      gen_rtx_ABS (mode, op1),
3904 				      gen_rtx_ABS (mode, mod))),
3905      gen_rtx_IF_THEN_ELSE
3906      (mode, gen_rtx_GT (BImode,
3907 			gen_rtx_DIV (mode, op1, mod),
3908 			const0_rtx),
3909       const1_rtx, constm1_rtx),
3910      const0_rtx);
3911 }
3912 
3913 /* Return the difference between the rounded and the truncated result
3914    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3915    are rounded away from zero, rather than to the nearest even
3916    number.  */
3917 static rtx
3918 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3919 {
3920   /* (mod >= op1 - mod ? 1 : 0) */
3921   return gen_rtx_IF_THEN_ELSE
3922     (mode, gen_rtx_GE (BImode, mod,
3923 		       gen_rtx_MINUS (mode, op1, mod)),
3924      const1_rtx, const0_rtx);
3925 }
3926 
3927 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3928    any rtl.  */
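/* Note that the conversions below only build rtl (subregs and
   ZERO_EXTEND/SIGN_EXTEND wrappers); nothing is emitted into the insn
   stream, and NULL is returned when a ptr_extend-style conversion cannot
   be represented for debug purposes.  */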
3929 
3930 static rtx
3931 convert_debug_memory_address (machine_mode mode, rtx x,
3932 			      addr_space_t as)
3933 {
3934   machine_mode xmode = GET_MODE (x);
3935 
3936 #ifndef POINTERS_EXTEND_UNSIGNED
3937   gcc_assert (mode == Pmode
3938 	      || mode == targetm.addr_space.address_mode (as));
3939   gcc_assert (xmode == mode || xmode == VOIDmode);
3940 #else
3941   rtx temp;
3942 
3943   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3944 
3945   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3946     return x;
3947 
3948   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3949     x = lowpart_subreg (mode, x, xmode);
3950   else if (POINTERS_EXTEND_UNSIGNED > 0)
3951     x = gen_rtx_ZERO_EXTEND (mode, x);
3952   else if (!POINTERS_EXTEND_UNSIGNED)
3953     x = gen_rtx_SIGN_EXTEND (mode, x);
3954   else
3955     {
3956       switch (GET_CODE (x))
3957 	{
3958 	case SUBREG:
3959 	  if ((SUBREG_PROMOTED_VAR_P (x)
3960 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3961 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
3962 		   && REG_P (XEXP (SUBREG_REG (x), 0))
3963 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3964 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3965 	      && GET_MODE (SUBREG_REG (x)) == mode)
3966 	    return SUBREG_REG (x);
3967 	  break;
3968 	case LABEL_REF:
3969 	  temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3970 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3971 	  return temp;
3972 	case SYMBOL_REF:
3973 	  temp = shallow_copy_rtx (x);
3974 	  PUT_MODE (temp, mode);
3975 	  return temp;
3976 	case CONST:
3977 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3978 	  if (temp)
3979 	    temp = gen_rtx_CONST (mode, temp);
3980 	  return temp;
3981 	case PLUS:
3982 	case MINUS:
3983 	  if (CONST_INT_P (XEXP (x, 1)))
3984 	    {
3985 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3986 	      if (temp)
3987 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3988 	    }
3989 	  break;
3990 	default:
3991 	  break;
3992 	}
3993       /* Don't know how to express ptr_extend as operation in debug info.  */
3994       return NULL;
3995     }
3996 #endif /* POINTERS_EXTEND_UNSIGNED */
3997 
3998   return x;
3999 }
4000 
4001 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4002    by avoid_deep_ter_for_debug.  */
4003 
4004 static hash_map<tree, tree> *deep_ter_debug_map;
4005 
4006 /* Split overly deep TER chains for debug stmts using debug temporaries.  */
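/* Roughly, for a use chained through more than six levels of TER'able
   definitions, the walk below binds the intermediate SSA name to a fresh
   DEBUG_EXPR_DECL with a debug bind stmt, so later debug expansion sees
   the temporary instead of an arbitrarily deep expression tree.  */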
4007 
4008 static void
4009 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4010 {
4011   use_operand_p use_p;
4012   ssa_op_iter iter;
4013   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4014     {
4015       tree use = USE_FROM_PTR (use_p);
4016       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4017 	continue;
4018       gimple *g = get_gimple_for_ssa_name (use);
4019       if (g == NULL)
4020 	continue;
4021       if (depth > 6 && !stmt_ends_bb_p (g))
4022 	{
4023 	  if (deep_ter_debug_map == NULL)
4024 	    deep_ter_debug_map = new hash_map<tree, tree>;
4025 
4026 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4027 	  if (vexpr != NULL)
4028 	    continue;
4029 	  vexpr = make_node (DEBUG_EXPR_DECL);
4030 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4031 	  DECL_ARTIFICIAL (vexpr) = 1;
4032 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4033 	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
4034 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4035 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4036 	  avoid_deep_ter_for_debug (def_temp, 0);
4037 	}
4038       else
4039 	avoid_deep_ter_for_debug (g, depth + 1);
4040     }
4041 }
4042 
4043 /* Return an RTX equivalent to the value of the parameter DECL.  */
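/* Note for the code below: when the incoming value lives in a hard register
   (or in memory addressed by one), it is wrapped in an ENTRY_VALUE rtx
   rather than referenced directly, since only the value the register held
   on entry is meaningful for debug info.  */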
4044 
4045 static rtx
4046 expand_debug_parm_decl (tree decl)
4047 {
4048   rtx incoming = DECL_INCOMING_RTL (decl);
4049 
4050   if (incoming
4051       && GET_MODE (incoming) != BLKmode
4052       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4053 	  || (MEM_P (incoming)
4054 	      && REG_P (XEXP (incoming, 0))
4055 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4056     {
4057       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4058 
4059 #ifdef HAVE_window_save
4060       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4061 	 If the target machine has an explicit window save instruction, the
4062 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4063       if (REG_P (incoming)
4064 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4065 	incoming
4066 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4067 				OUTGOING_REGNO (REGNO (incoming)), 0);
4068       else if (MEM_P (incoming))
4069 	{
4070 	  rtx reg = XEXP (incoming, 0);
4071 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4072 	    {
4073 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4074 	      incoming = replace_equiv_address_nv (incoming, reg);
4075 	    }
4076 	  else
4077 	    incoming = copy_rtx (incoming);
4078 	}
4079 #endif
4080 
4081       ENTRY_VALUE_EXP (rtl) = incoming;
4082       return rtl;
4083     }
4084 
4085   if (incoming
4086       && GET_MODE (incoming) != BLKmode
4087       && !TREE_ADDRESSABLE (decl)
4088       && MEM_P (incoming)
4089       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4090 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4091 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4092 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4093     return copy_rtx (incoming);
4094 
4095   return NULL_RTX;
4096 }
4097 
4098 /* Return an RTX equivalent to the value of the tree expression EXP.  */
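/* Unlike expand_expr, this must not emit any instructions; it only builds
   rtl with the simplify_gen_* and gen_rtx_* helpers, and it returns NULL_RTX
   whenever EXP has no representation that is safe to use in debug insns.  */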
4099 
4100 static rtx
4101 expand_debug_expr (tree exp)
4102 {
4103   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4104   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4105   machine_mode inner_mode = VOIDmode;
4106   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4107   addr_space_t as;
4108 
4109   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4110     {
4111     case tcc_expression:
4112       switch (TREE_CODE (exp))
4113 	{
4114 	case COND_EXPR:
4115 	case DOT_PROD_EXPR:
4116 	case SAD_EXPR:
4117 	case WIDEN_MULT_PLUS_EXPR:
4118 	case WIDEN_MULT_MINUS_EXPR:
4119 	case FMA_EXPR:
4120 	  goto ternary;
4121 
4122 	case TRUTH_ANDIF_EXPR:
4123 	case TRUTH_ORIF_EXPR:
4124 	case TRUTH_AND_EXPR:
4125 	case TRUTH_OR_EXPR:
4126 	case TRUTH_XOR_EXPR:
4127 	  goto binary;
4128 
4129 	case TRUTH_NOT_EXPR:
4130 	  goto unary;
4131 
4132 	default:
4133 	  break;
4134 	}
4135       break;
4136 
4137     ternary:
4138       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4139       if (!op2)
4140 	return NULL_RTX;
4141       /* Fall through.  */
4142 
4143     binary:
4144     case tcc_binary:
4145       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4146       if (!op1)
4147 	return NULL_RTX;
4148       switch (TREE_CODE (exp))
4149 	{
4150 	case LSHIFT_EXPR:
4151 	case RSHIFT_EXPR:
4152 	case LROTATE_EXPR:
4153 	case RROTATE_EXPR:
4154 	case WIDEN_LSHIFT_EXPR:
4155 	  /* Ensure second operand isn't wider than the first one.  */
4156 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4157 	  if (SCALAR_INT_MODE_P (inner_mode))
4158 	    {
4159 	      machine_mode opmode = mode;
4160 	      if (VECTOR_MODE_P (mode))
4161 		opmode = GET_MODE_INNER (mode);
4162 	      if (SCALAR_INT_MODE_P (opmode)
4163 		  && (GET_MODE_PRECISION (opmode)
4164 		      < GET_MODE_PRECISION (inner_mode)))
4165 		op1 = lowpart_subreg (opmode, op1, inner_mode);
4166 	    }
4167 	  break;
4168 	default:
4169 	  break;
4170 	}
4171       /* Fall through.  */
4172 
4173     unary:
4174     case tcc_unary:
4175       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4176       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4177       if (!op0)
4178 	return NULL_RTX;
4179       break;
4180 
4181     case tcc_comparison:
4182       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4183       goto binary;
4184 
4185     case tcc_type:
4186     case tcc_statement:
4187       gcc_unreachable ();
4188 
4189     case tcc_constant:
4190     case tcc_exceptional:
4191     case tcc_declaration:
4192     case tcc_reference:
4193     case tcc_vl_exp:
4194       break;
4195     }
4196 
4197   switch (TREE_CODE (exp))
4198     {
4199     case STRING_CST:
4200       if (!lookup_constant_def (exp))
4201 	{
4202 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4203 	      != (size_t) TREE_STRING_LENGTH (exp))
4204 	    return NULL_RTX;
4205 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4206 	  op0 = gen_rtx_MEM (BLKmode, op0);
4207 	  set_mem_attributes (op0, exp, 0);
4208 	  return op0;
4209 	}
4210       /* Fall through...  */
4211 
4212     case INTEGER_CST:
4213     case REAL_CST:
4214     case FIXED_CST:
4215       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4216       return op0;
4217 
4218     case COMPLEX_CST:
4219       gcc_assert (COMPLEX_MODE_P (mode));
4220       op0 = expand_debug_expr (TREE_REALPART (exp));
4221       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4222       return gen_rtx_CONCAT (mode, op0, op1);
4223 
4224     case DEBUG_EXPR_DECL:
4225       op0 = DECL_RTL_IF_SET (exp);
4226 
4227       if (op0)
4228 	return op0;
4229 
4230       op0 = gen_rtx_DEBUG_EXPR (mode);
4231       DEBUG_EXPR_TREE_DECL (op0) = exp;
4232       SET_DECL_RTL (exp, op0);
4233 
4234       return op0;
4235 
4236     case VAR_DECL:
4237     case PARM_DECL:
4238     case FUNCTION_DECL:
4239     case LABEL_DECL:
4240     case CONST_DECL:
4241     case RESULT_DECL:
4242       op0 = DECL_RTL_IF_SET (exp);
4243 
4244       /* This decl was probably optimized away.  */
4245       if (!op0)
4246 	{
4247 	  if (TREE_CODE (exp) != VAR_DECL
4248 	      || DECL_EXTERNAL (exp)
4249 	      || !TREE_STATIC (exp)
4250 	      || !DECL_NAME (exp)
4251 	      || DECL_HARD_REGISTER (exp)
4252 	      || DECL_IN_CONSTANT_POOL (exp)
4253 	      || mode == VOIDmode)
4254 	    return NULL;
4255 
4256 	  op0 = make_decl_rtl_for_debug (exp);
4257 	  if (!MEM_P (op0)
4258 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4259 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4260 	    return NULL;
4261 	}
4262       else
4263 	op0 = copy_rtx (op0);
4264 
4265       if (GET_MODE (op0) == BLKmode
4266 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4267 	     below would ICE.  While it is likely a FE bug,
4268 	     try to be robust here.  See PR43166.  */
4269 	  || mode == BLKmode
4270 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4271 	{
4272 	  gcc_assert (MEM_P (op0));
4273 	  op0 = adjust_address_nv (op0, mode, 0);
4274 	  return op0;
4275 	}
4276 
4277       /* Fall through.  */
4278 
4279     adjust_mode:
4280     case PAREN_EXPR:
4281     CASE_CONVERT:
4282       {
4283 	inner_mode = GET_MODE (op0);
4284 
4285 	if (mode == inner_mode)
4286 	  return op0;
4287 
4288 	if (inner_mode == VOIDmode)
4289 	  {
4290 	    if (TREE_CODE (exp) == SSA_NAME)
4291 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4292 	    else
4293 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4294 	    if (mode == inner_mode)
4295 	      return op0;
4296 	  }
4297 
4298 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4299 	  {
4300 	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4301 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4302 	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4303 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4304 	    else
4305 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4306 	  }
4307 	else if (FLOAT_MODE_P (mode))
4308 	  {
4309 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4310 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4311 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4312 	    else
4313 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4314 	  }
4315 	else if (FLOAT_MODE_P (inner_mode))
4316 	  {
4317 	    if (unsignedp)
4318 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4319 	    else
4320 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4321 	  }
4322 	else if (CONSTANT_P (op0)
4323 		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4324 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4325 	else if (UNARY_CLASS_P (exp)
4326 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4327 		 : unsignedp)
4328 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4329 	else
4330 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4331 
4332 	return op0;
4333       }
4334 
4335     case MEM_REF:
4336       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4337 	{
4338 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4339 				     TREE_OPERAND (exp, 0),
4340 				     TREE_OPERAND (exp, 1));
4341 	  if (newexp)
4342 	    return expand_debug_expr (newexp);
4343 	}
4344       /* FALLTHROUGH */
4345     case INDIRECT_REF:
4346       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4347       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4348       if (!op0)
4349 	return NULL;
4350 
4351       if (TREE_CODE (exp) == MEM_REF)
4352 	{
4353 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4354 	      || (GET_CODE (op0) == PLUS
4355 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4356 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4357 	       Instead just use get_inner_reference.  */
4358 	    goto component_ref;
4359 
4360 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4361 	  if (!op1 || !CONST_INT_P (op1))
4362 	    return NULL;
4363 
4364 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4365 	}
4366 
4367       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4368 
4369       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4370 					  op0, as);
4371       if (op0 == NULL_RTX)
4372 	return NULL;
4373 
4374       op0 = gen_rtx_MEM (mode, op0);
4375       set_mem_attributes (op0, exp, 0);
4376       if (TREE_CODE (exp) == MEM_REF
4377 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4378 	set_mem_expr (op0, NULL_TREE);
4379       set_mem_addr_space (op0, as);
4380 
4381       return op0;
4382 
4383     case TARGET_MEM_REF:
4384       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4385 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4386 	return NULL;
4387 
4388       op0 = expand_debug_expr
4389 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4390       if (!op0)
4391 	return NULL;
4392 
4393       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4394       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4395 					  op0, as);
4396       if (op0 == NULL_RTX)
4397 	return NULL;
4398 
4399       op0 = gen_rtx_MEM (mode, op0);
4400 
4401       set_mem_attributes (op0, exp, 0);
4402       set_mem_addr_space (op0, as);
4403 
4404       return op0;
4405 
4406     component_ref:
4407     case ARRAY_REF:
4408     case ARRAY_RANGE_REF:
4409     case COMPONENT_REF:
4410     case BIT_FIELD_REF:
4411     case REALPART_EXPR:
4412     case IMAGPART_EXPR:
4413     case VIEW_CONVERT_EXPR:
4414       {
4415 	machine_mode mode1;
4416 	HOST_WIDE_INT bitsize, bitpos;
4417 	tree offset;
4418 	int reversep, volatilep = 0;
4419 	tree tem
4420 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4421 				 &unsignedp, &reversep, &volatilep, false);
4422 	rtx orig_op0;
4423 
4424 	if (bitsize == 0)
4425 	  return NULL;
4426 
4427 	orig_op0 = op0 = expand_debug_expr (tem);
4428 
4429 	if (!op0)
4430 	  return NULL;
4431 
4432 	if (offset)
4433 	  {
4434 	    machine_mode addrmode, offmode;
4435 
4436 	    if (!MEM_P (op0))
4437 	      return NULL;
4438 
4439 	    op0 = XEXP (op0, 0);
4440 	    addrmode = GET_MODE (op0);
4441 	    if (addrmode == VOIDmode)
4442 	      addrmode = Pmode;
4443 
4444 	    op1 = expand_debug_expr (offset);
4445 	    if (!op1)
4446 	      return NULL;
4447 
4448 	    offmode = GET_MODE (op1);
4449 	    if (offmode == VOIDmode)
4450 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4451 
4452 	    if (addrmode != offmode)
4453 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4454 
4455 	    /* Don't use offset_address here, we don't need a
4456 	       recognizable address, and we don't want to generate
4457 	       code.  */
4458 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4459 							  op0, op1));
4460 	  }
4461 
4462 	if (MEM_P (op0))
4463 	  {
4464 	    if (mode1 == VOIDmode)
4465 	      /* Bitfield.  */
4466 	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4467 	    if (bitpos >= BITS_PER_UNIT)
4468 	      {
4469 		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4470 		bitpos %= BITS_PER_UNIT;
4471 	      }
4472 	    else if (bitpos < 0)
4473 	      {
4474 		HOST_WIDE_INT units
4475 		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4476 		op0 = adjust_address_nv (op0, mode1, units);
4477 		bitpos += units * BITS_PER_UNIT;
4478 	      }
4479 	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4480 	      op0 = adjust_address_nv (op0, mode, 0);
4481 	    else if (GET_MODE (op0) != mode1)
4482 	      op0 = adjust_address_nv (op0, mode1, 0);
4483 	    else
4484 	      op0 = copy_rtx (op0);
4485 	    if (op0 == orig_op0)
4486 	      op0 = shallow_copy_rtx (op0);
4487 	    set_mem_attributes (op0, exp, 0);
4488 	  }
4489 
4490 	if (bitpos == 0 && mode == GET_MODE (op0))
4491 	  return op0;
4492 
4493         if (bitpos < 0)
4494           return NULL;
4495 
4496 	if (GET_MODE (op0) == BLKmode)
4497 	  return NULL;
4498 
4499 	if ((bitpos % BITS_PER_UNIT) == 0
4500 	    && bitsize == GET_MODE_BITSIZE (mode1))
4501 	  {
4502 	    machine_mode opmode = GET_MODE (op0);
4503 
4504 	    if (opmode == VOIDmode)
4505 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4506 
4507 	    /* This condition may hold if we're expanding the address
4508 	       right past the end of an array that turned out not to
4509 	       be addressable (i.e., the address was only computed in
4510 	       debug stmts).  The gen_subreg below would rightfully
4511 	       crash, and the address doesn't really exist, so just
4512 	       drop it.  */
4513 	    if (bitpos >= GET_MODE_BITSIZE (opmode))
4514 	      return NULL;
4515 
4516 	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4517 	      return simplify_gen_subreg (mode, op0, opmode,
4518 					  bitpos / BITS_PER_UNIT);
4519 	  }
4520 
4521 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4522 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4523 				     ? SIGN_EXTRACT
4524 				     : ZERO_EXTRACT, mode,
4525 				     GET_MODE (op0) != VOIDmode
4526 				     ? GET_MODE (op0)
4527 				     : TYPE_MODE (TREE_TYPE (tem)),
4528 				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
4529       }
4530 
4531     case ABS_EXPR:
4532       return simplify_gen_unary (ABS, mode, op0, mode);
4533 
4534     case NEGATE_EXPR:
4535       return simplify_gen_unary (NEG, mode, op0, mode);
4536 
4537     case BIT_NOT_EXPR:
4538       return simplify_gen_unary (NOT, mode, op0, mode);
4539 
4540     case FLOAT_EXPR:
4541       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4542 									 0)))
4543 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4544 				 inner_mode);
4545 
4546     case FIX_TRUNC_EXPR:
4547       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4548 				 inner_mode);
4549 
4550     case POINTER_PLUS_EXPR:
4551       /* For the rare target where pointers are not the same size as
4552 	 size_t, we need to check for mismatched modes and correct
4553 	 the addend.  */
4554       if (op0 && op1
4555 	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4556 	  && GET_MODE (op0) != GET_MODE (op1))
4557 	{
4558 	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4559 	      /* If OP0 is a partial mode, then we must truncate, even if it has
4560 		 the same bitsize as OP1, because GCC's representation of partial
4561 		 modes is opaque.  */
4562 	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4563 		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4564 	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4565 				      GET_MODE (op1));
4566 	  else
4567 	    /* We always sign-extend, regardless of the signedness of
4568 	       the operand, because the operand is always unsigned
4569 	       here even if the original C expression is signed.  */
4570 	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4571 				      GET_MODE (op1));
4572 	}
4573       /* Fall through.  */
4574     case PLUS_EXPR:
4575       return simplify_gen_binary (PLUS, mode, op0, op1);
4576 
4577     case MINUS_EXPR:
4578       return simplify_gen_binary (MINUS, mode, op0, op1);
4579 
4580     case MULT_EXPR:
4581       return simplify_gen_binary (MULT, mode, op0, op1);
4582 
4583     case RDIV_EXPR:
4584     case TRUNC_DIV_EXPR:
4585     case EXACT_DIV_EXPR:
4586       if (unsignedp)
4587 	return simplify_gen_binary (UDIV, mode, op0, op1);
4588       else
4589 	return simplify_gen_binary (DIV, mode, op0, op1);
4590 
4591     case TRUNC_MOD_EXPR:
4592       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4593 
4594     case FLOOR_DIV_EXPR:
4595       if (unsignedp)
4596 	return simplify_gen_binary (UDIV, mode, op0, op1);
4597       else
4598 	{
4599 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4600 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4601 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4602 	  return simplify_gen_binary (PLUS, mode, div, adj);
4603 	}
4604 
4605     case FLOOR_MOD_EXPR:
4606       if (unsignedp)
4607 	return simplify_gen_binary (UMOD, mode, op0, op1);
4608       else
4609 	{
4610 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4611 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4612 	  adj = simplify_gen_unary (NEG, mode,
4613 				    simplify_gen_binary (MULT, mode, adj, op1),
4614 				    mode);
4615 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4616 	}
4617 
4618     case CEIL_DIV_EXPR:
4619       if (unsignedp)
4620 	{
4621 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4622 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4623 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4624 	  return simplify_gen_binary (PLUS, mode, div, adj);
4625 	}
4626       else
4627 	{
4628 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4629 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4630 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4631 	  return simplify_gen_binary (PLUS, mode, div, adj);
4632 	}
4633 
4634     case CEIL_MOD_EXPR:
4635       if (unsignedp)
4636 	{
4637 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4638 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4639 	  adj = simplify_gen_unary (NEG, mode,
4640 				    simplify_gen_binary (MULT, mode, adj, op1),
4641 				    mode);
4642 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4643 	}
4644       else
4645 	{
4646 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4647 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4648 	  adj = simplify_gen_unary (NEG, mode,
4649 				    simplify_gen_binary (MULT, mode, adj, op1),
4650 				    mode);
4651 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4652 	}
4653 
4654     case ROUND_DIV_EXPR:
4655       if (unsignedp)
4656 	{
4657 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4658 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4659 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4660 	  return simplify_gen_binary (PLUS, mode, div, adj);
4661 	}
4662       else
4663 	{
4664 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4665 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4666 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4667 	  return simplify_gen_binary (PLUS, mode, div, adj);
4668 	}
4669 
4670     case ROUND_MOD_EXPR:
4671       if (unsignedp)
4672 	{
4673 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4674 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4675 	  adj = simplify_gen_unary (NEG, mode,
4676 				    simplify_gen_binary (MULT, mode, adj, op1),
4677 				    mode);
4678 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4679 	}
4680       else
4681 	{
4682 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4683 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4684 	  adj = simplify_gen_unary (NEG, mode,
4685 				    simplify_gen_binary (MULT, mode, adj, op1),
4686 				    mode);
4687 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4688 	}
4689 
4690     case LSHIFT_EXPR:
4691       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4692 
4693     case RSHIFT_EXPR:
4694       if (unsignedp)
4695 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4696       else
4697 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4698 
4699     case LROTATE_EXPR:
4700       return simplify_gen_binary (ROTATE, mode, op0, op1);
4701 
4702     case RROTATE_EXPR:
4703       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4704 
4705     case MIN_EXPR:
4706       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4707 
4708     case MAX_EXPR:
4709       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4710 
4711     case BIT_AND_EXPR:
4712     case TRUTH_AND_EXPR:
4713       return simplify_gen_binary (AND, mode, op0, op1);
4714 
4715     case BIT_IOR_EXPR:
4716     case TRUTH_OR_EXPR:
4717       return simplify_gen_binary (IOR, mode, op0, op1);
4718 
4719     case BIT_XOR_EXPR:
4720     case TRUTH_XOR_EXPR:
4721       return simplify_gen_binary (XOR, mode, op0, op1);
4722 
4723     case TRUTH_ANDIF_EXPR:
4724       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4725 
4726     case TRUTH_ORIF_EXPR:
4727       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4728 
4729     case TRUTH_NOT_EXPR:
4730       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4731 
4732     case LT_EXPR:
4733       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4734 				      op0, op1);
4735 
4736     case LE_EXPR:
4737       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4738 				      op0, op1);
4739 
4740     case GT_EXPR:
4741       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4742 				      op0, op1);
4743 
4744     case GE_EXPR:
4745       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4746 				      op0, op1);
4747 
4748     case EQ_EXPR:
4749       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4750 
4751     case NE_EXPR:
4752       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4753 
4754     case UNORDERED_EXPR:
4755       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4756 
4757     case ORDERED_EXPR:
4758       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4759 
4760     case UNLT_EXPR:
4761       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4762 
4763     case UNLE_EXPR:
4764       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4765 
4766     case UNGT_EXPR:
4767       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4768 
4769     case UNGE_EXPR:
4770       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4771 
4772     case UNEQ_EXPR:
4773       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4774 
4775     case LTGT_EXPR:
4776       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4777 
4778     case COND_EXPR:
4779       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4780 
4781     case COMPLEX_EXPR:
4782       gcc_assert (COMPLEX_MODE_P (mode));
4783       if (GET_MODE (op0) == VOIDmode)
4784 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4785       if (GET_MODE (op1) == VOIDmode)
4786 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4787       return gen_rtx_CONCAT (mode, op0, op1);
4788 
4789     case CONJ_EXPR:
4790       if (GET_CODE (op0) == CONCAT)
4791 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4792 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4793 						   XEXP (op0, 1),
4794 						   GET_MODE_INNER (mode)));
4795       else
4796 	{
4797 	  machine_mode imode = GET_MODE_INNER (mode);
4798 	  rtx re, im;
4799 
4800 	  if (MEM_P (op0))
4801 	    {
4802 	      re = adjust_address_nv (op0, imode, 0);
4803 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4804 	    }
4805 	  else
4806 	    {
4807 	      machine_mode ifmode = int_mode_for_mode (mode);
4808 	      machine_mode ihmode = int_mode_for_mode (imode);
4809 	      rtx halfsize;
4810 	      if (ifmode == BLKmode || ihmode == BLKmode)
4811 		return NULL;
4812 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4813 	      re = op0;
4814 	      if (mode != ifmode)
4815 		re = gen_rtx_SUBREG (ifmode, re, 0);
4816 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4817 	      if (imode != ihmode)
4818 		re = gen_rtx_SUBREG (imode, re, 0);
4819 	      im = copy_rtx (op0);
4820 	      if (mode != ifmode)
4821 		im = gen_rtx_SUBREG (ifmode, im, 0);
4822 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4823 	      if (imode != ihmode)
4824 		im = gen_rtx_SUBREG (imode, im, 0);
4825 	    }
4826 	  im = gen_rtx_NEG (imode, im);
4827 	  return gen_rtx_CONCAT (mode, re, im);
4828 	}
4829 
4830     case ADDR_EXPR:
4831       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4832       if (!op0 || !MEM_P (op0))
4833 	{
4834 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4835 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4836 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4837 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4838 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4839 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4840 
4841 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4842 	    {
4843 	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
4844 	      bool reverse;
4845 	      tree decl
4846 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4847 					   &bitsize, &maxsize, &reverse);
4848 	      if ((TREE_CODE (decl) == VAR_DECL
4849 		   || TREE_CODE (decl) == PARM_DECL
4850 		   || TREE_CODE (decl) == RESULT_DECL)
4851 		  && (!TREE_ADDRESSABLE (decl)
4852 		      || target_for_debug_bind (decl))
4853 		  && (bitoffset % BITS_PER_UNIT) == 0
4854 		  && bitsize > 0
4855 		  && bitsize == maxsize)
4856 		{
4857 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4858 		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4859 		}
4860 	    }
4861 
4862 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4863 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4864 		 == ADDR_EXPR)
4865 	    {
4866 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4867 						     0));
4868 	      if (op0 != NULL
4869 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4870 		      || (GET_CODE (op0) == PLUS
4871 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4872 			  && CONST_INT_P (XEXP (op0, 1)))))
4873 		{
4874 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4875 							 1));
4876 		  if (!op1 || !CONST_INT_P (op1))
4877 		    return NULL;
4878 
4879 		  return plus_constant (mode, op0, INTVAL (op1));
4880 		}
4881 	    }
4882 
4883 	  return NULL;
4884 	}
4885 
4886       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4887       op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4888 
4889       return op0;
4890 
4891     case VECTOR_CST:
4892       {
4893 	unsigned i;
4894 
4895 	op0 = gen_rtx_CONCATN
4896 	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4897 
4898 	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4899 	  {
4900 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4901 	    if (!op1)
4902 	      return NULL;
4903 	    XVECEXP (op0, 0, i) = op1;
4904 	  }
4905 
4906 	return op0;
4907       }
4908 
4909     case CONSTRUCTOR:
4910       if (TREE_CLOBBER_P (exp))
4911 	return NULL;
4912       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4913 	{
4914 	  unsigned i;
4915 	  tree val;
4916 
4917 	  op0 = gen_rtx_CONCATN
4918 	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4919 
4920 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4921 	    {
4922 	      op1 = expand_debug_expr (val);
4923 	      if (!op1)
4924 		return NULL;
4925 	      XVECEXP (op0, 0, i) = op1;
4926 	    }
4927 
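	  /* A vector CONSTRUCTOR may omit trailing elements; pad the
	     remaining lanes of the CONCATN with zero.  */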
4928 	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4929 	    {
4930 	      op1 = expand_debug_expr
4931 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4932 
4933 	      if (!op1)
4934 		return NULL;
4935 
4936 	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4937 		XVECEXP (op0, 0, i) = op1;
4938 	    }
4939 
4940 	  return op0;
4941 	}
4942       else
4943 	goto flag_unsupported;
4944 
4945     case CALL_EXPR:
4946       /* ??? Maybe handle some builtins?  */
4947       return NULL;
4948 
4949     case SSA_NAME:
4950       {
4951 	gimple *g = get_gimple_for_ssa_name (exp);
4952 	if (g)
4953 	  {
4954 	    tree t = NULL_TREE;
4955 	    if (deep_ter_debug_map)
4956 	      {
4957 		tree *slot = deep_ter_debug_map->get (exp);
4958 		if (slot)
4959 		  t = *slot;
4960 	      }
4961 	    if (t == NULL_TREE)
4962 	      t = gimple_assign_rhs_to_tree (g);
4963 	    op0 = expand_debug_expr (t);
4964 	    if (!op0)
4965 	      return NULL;
4966 	  }
4967 	else
4968 	  {
4969 	    /* If this is a reference to the incoming value of a
4970 	       parameter that is never used in the code, or whose
4971 	       incoming value is never used in the code, use the
4972 	       PARM_DECL's DECL_RTL if set.  */
4973 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
4974 		&& SSA_NAME_VAR (exp)
4975 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
4976 		&& has_zero_uses (exp))
4977 	      {
4978 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4979 		if (op0)
4980 		  goto adjust_mode;
4981 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4982 		if (op0)
4983 		  goto adjust_mode;
4984 	      }
4985 
4986 	    int part = var_to_partition (SA.map, exp);
4987 
4988 	    if (part == NO_PARTITION)
4989 	      return NULL;
4990 
4991 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4992 
4993 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
4994 	  }
4995 	goto adjust_mode;
4996       }
4997 
4998     case ERROR_MARK:
4999       return NULL;
5000 
5001     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5002     case REALIGN_LOAD_EXPR:
5003     case REDUC_MAX_EXPR:
5004     case REDUC_MIN_EXPR:
5005     case REDUC_PLUS_EXPR:
5006     case VEC_COND_EXPR:
5007     case VEC_PACK_FIX_TRUNC_EXPR:
5008     case VEC_PACK_SAT_EXPR:
5009     case VEC_PACK_TRUNC_EXPR:
5010     case VEC_UNPACK_FLOAT_HI_EXPR:
5011     case VEC_UNPACK_FLOAT_LO_EXPR:
5012     case VEC_UNPACK_HI_EXPR:
5013     case VEC_UNPACK_LO_EXPR:
5014     case VEC_WIDEN_MULT_HI_EXPR:
5015     case VEC_WIDEN_MULT_LO_EXPR:
5016     case VEC_WIDEN_MULT_EVEN_EXPR:
5017     case VEC_WIDEN_MULT_ODD_EXPR:
5018     case VEC_WIDEN_LSHIFT_HI_EXPR:
5019     case VEC_WIDEN_LSHIFT_LO_EXPR:
5020     case VEC_PERM_EXPR:
5021       return NULL;
5022 
5023     /* Misc codes.  */
5024     case ADDR_SPACE_CONVERT_EXPR:
5025     case FIXED_CONVERT_EXPR:
5026     case OBJ_TYPE_REF:
5027     case WITH_SIZE_EXPR:
5028       return NULL;
5029 
5030     case DOT_PROD_EXPR:
5031       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5032 	  && SCALAR_INT_MODE_P (mode))
5033 	{
5034 	  op0
5035 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5036 									  0)))
5037 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5038 				  inner_mode);
5039 	  op1
5040 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5041 									  1)))
5042 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5043 				  inner_mode);
5044 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5045 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5046 	}
5047       return NULL;
5048 
5049     case WIDEN_MULT_EXPR:
5050     case WIDEN_MULT_PLUS_EXPR:
5051     case WIDEN_MULT_MINUS_EXPR:
5052       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5053 	  && SCALAR_INT_MODE_P (mode))
5054 	{
5055 	  inner_mode = GET_MODE (op0);
5056 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5057 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5058 	  else
5059 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5060 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5061 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5062 	  else
5063 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5064 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5065 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5066 	    return op0;
5067 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5068 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5069 	  else
5070 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5071 	}
5072       return NULL;
5073 
5074     case MULT_HIGHPART_EXPR:
5075       /* ??? Similar to the above.  */
5076       return NULL;
5077 
5078     case WIDEN_SUM_EXPR:
5079     case WIDEN_LSHIFT_EXPR:
5080       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5081 	  && SCALAR_INT_MODE_P (mode))
5082 	{
5083 	  op0
5084 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5085 									  0)))
5086 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5087 				  inner_mode);
5088 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5089 				      ? ASHIFT : PLUS, mode, op0, op1);
5090 	}
5091       return NULL;
5092 
5093     case FMA_EXPR:
5094       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5095 
5096     default:
5097     flag_unsupported:
5098       if (flag_checking)
5099 	{
5100 	  debug_tree (exp);
5101 	  gcc_unreachable ();
5102 	}
5103       return NULL;
5104     }
5105 }
5106 
5107 /* Return an RTX equivalent to the source bind value of the tree expression
5108    EXP.  */
5109 
5110 static rtx
5111 expand_debug_source_expr (tree exp)
5112 {
5113   rtx op0 = NULL_RTX;
5114   machine_mode mode = VOIDmode, inner_mode;
5115 
5116   switch (TREE_CODE (exp))
5117     {
5118     case PARM_DECL:
5119       {
5120 	mode = DECL_MODE (exp);
5121 	op0 = expand_debug_parm_decl (exp);
5122 	if (op0)
5123 	   break;
5124 	/* See if this isn't an argument that has been completely
5125 	   optimized out.  */
5126 	if (!DECL_RTL_SET_P (exp)
5127 	    && !DECL_INCOMING_RTL (exp)
5128 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5129 	  {
5130 	    tree aexp = DECL_ORIGIN (exp);
5131 	    if (DECL_CONTEXT (aexp)
5132 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5133 	      {
5134 		vec<tree, va_gc> **debug_args;
5135 		unsigned int ix;
5136 		tree ddecl;
5137 		debug_args = decl_debug_args_lookup (current_function_decl);
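		/* The debug args vector stores (origin decl, value) pairs,
		   hence the stride of two in the loop below.  */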
5138 		if (debug_args != NULL)
5139 		  {
5140 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5141 			 ix += 2)
5142 		      if (ddecl == aexp)
5143 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5144 		  }
5145 	      }
5146 	  }
5147 	break;
5148       }
5149     default:
5150       break;
5151     }
5152 
5153   if (op0 == NULL_RTX)
5154     return NULL_RTX;
5155 
5156   inner_mode = GET_MODE (op0);
5157   if (mode == inner_mode)
5158     return op0;
5159 
5160   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5161     {
5162       if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
5163 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5164       else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
5165 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5166       else
5167 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5168     }
5169   else if (FLOAT_MODE_P (mode))
5170     gcc_unreachable ();
5171   else if (FLOAT_MODE_P (inner_mode))
5172     {
5173       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5174 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5175       else
5176 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5177     }
5178   else if (CONSTANT_P (op0)
5179 	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
5180     op0 = lowpart_subreg (mode, op0, inner_mode);
5181   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5182     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5183   else
5184     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5185 
5186   return op0;
5187 }
5188 
5189 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5190    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5191    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
5192 
5193 static void
5194 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5195 {
5196   rtx exp = *exp_p;
5197 
5198   if (exp == NULL_RTX)
5199     return;
5200 
5201   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5202     return;
5203 
5204   if (depth == 4)
5205     {
5206       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5207       rtx dval = make_debug_expr_from_rtl (exp);
5208 
5209       /* Emit a debug bind insn before INSN.  */
5210       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5211 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5212 				       VAR_INIT_STATUS_INITIALIZED);
5213 
5214       emit_debug_insn_before (bind, insn);
5215       *exp_p = dval;
5216       return;
5217     }
5218 
5219   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5220   int i, j;
5221   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5222     switch (*format_ptr++)
5223       {
5224       case 'e':
5225 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5226 	break;
5227 
5228       case 'E':
5229       case 'V':
5230 	for (j = 0; j < XVECLEN (exp, i); j++)
5231 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5232 	break;
5233 
5234       default:
5235 	break;
5236       }
5237 }
5238 
5239 /* Expand the _LOCs in debug insns.  We run this after expanding all
5240    regular insns, so that any variables referenced in the function
5241    will have their DECL_RTLs set.  */
5242 
5243 static void
5244 expand_debug_locations (void)
5245 {
5246   rtx_insn *insn;
5247   rtx_insn *last = get_last_insn ();
5248   int save_strict_alias = flag_strict_aliasing;
5249 
5250   /* New alias sets while setting up memory attributes cause
5251      -fcompare-debug failures, even though they don't bring about any
5252      codegen changes.  */
5253   flag_strict_aliasing = 0;
5254 
5255   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5256     if (DEBUG_INSN_P (insn))
5257       {
5258 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5259 	rtx val;
5260 	rtx_insn *prev_insn, *insn2;
5261 	machine_mode mode;
5262 
5263 	if (value == NULL_TREE)
5264 	  val = NULL_RTX;
5265 	else
5266 	  {
5267 	    if (INSN_VAR_LOCATION_STATUS (insn)
5268 		== VAR_INIT_STATUS_UNINITIALIZED)
5269 	      val = expand_debug_source_expr (value);
5270 	    /* The avoid_deep_ter_for_debug function inserts
5271 	       debug bind stmts after SSA_NAME definition, with the
5272 	       SSA_NAME as the whole bind location.  Disable temporarily
5273 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5274 	       being defined in this DEBUG_INSN.  */
5275 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5276 	      {
5277 		tree *slot = deep_ter_debug_map->get (value);
5278 		if (slot)
5279 		  {
5280 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5281 		      *slot = NULL_TREE;
5282 		    else
5283 		      slot = NULL;
5284 		  }
5285 		val = expand_debug_expr (value);
5286 		if (slot)
5287 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5288 	      }
5289 	    else
5290 	      val = expand_debug_expr (value);
5291 	    gcc_assert (last == get_last_insn ());
5292 	  }
5293 
5294 	if (!val)
5295 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5296 	else
5297 	  {
5298 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5299 
5300 	    gcc_assert (mode == GET_MODE (val)
5301 			|| (GET_MODE (val) == VOIDmode
5302 			    && (CONST_SCALAR_INT_P (val)
5303 				|| GET_CODE (val) == CONST_FIXED
5304 				|| GET_CODE (val) == LABEL_REF)));
5305 	  }
5306 
5307 	INSN_VAR_LOCATION_LOC (insn) = val;
5308 	prev_insn = PREV_INSN (insn);
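	/* Also process any debug insns that avoid_complex_debug_insns
	   emits in front of INSN; they end up between PREV_INSN and INSN,
	   so walk backwards until the original predecessor is reached.  */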
5309 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5310 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5311       }
5312 
5313   flag_strict_aliasing = save_strict_alias;
5314 }
5315 
5316 /* Swap the operands of commutative operations so that the more
5317    expensive operand is expanded first.  */
5318 
5319 static void
5320 reorder_operands (basic_block bb)
5321 {
5322   unsigned int *lattice;  /* Hold cost of each statement.  */
5323   unsigned int i = 0, n = 0;
5324   gimple_stmt_iterator gsi;
5325   gimple_seq stmts;
5326   gimple *stmt;
5327   bool swap;
5328   tree op0, op1;
5329   ssa_op_iter iter;
5330   use_operand_p use_p;
5331   gimple *def0, *def1;
5332 
5333   /* Compute cost of each statement using estimate_num_insns.  */
5334   stmts = bb_seq (bb);
5335   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5336     {
5337       stmt = gsi_stmt (gsi);
5338       if (!is_gimple_debug (stmt))
5339         gimple_set_uid (stmt, n++);
5340     }
5341   lattice = XNEWVEC (unsigned int, n);
5342   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5343     {
5344       unsigned cost;
5345       stmt = gsi_stmt (gsi);
5346       if (is_gimple_debug (stmt))
5347 	continue;
5348       cost = estimate_num_insns (stmt, &eni_size_weights);
5349       lattice[i] = cost;
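      /* Include the cost of any defining statements that will be
	 forwarded (TERed) into this one, so the lattice value
	 approximates the size of the whole expression tree expanded at
	 this point.  */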
5350       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5351 	{
5352 	  tree use = USE_FROM_PTR (use_p);
5353 	  gimple *def_stmt;
5354 	  if (TREE_CODE (use) != SSA_NAME)
5355 	    continue;
5356 	  def_stmt = get_gimple_for_ssa_name (use);
5357 	  if (!def_stmt)
5358 	    continue;
5359 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5360 	}
5361       i++;
5362       if (!is_gimple_assign (stmt)
5363 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5364 	continue;
5365       op0 = gimple_op (stmt, 1);
5366       op1 = gimple_op (stmt, 2);
5367       if (TREE_CODE (op0) != SSA_NAME
5368 	  || TREE_CODE (op1) != SSA_NAME)
5369 	continue;
5370       /* Swap operands if the second one is more expensive.  */
5371       def0 = get_gimple_for_ssa_name (op0);
5372       def1 = get_gimple_for_ssa_name (op1);
5373       if (!def1)
5374 	continue;
5375       swap = false;
5376       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5377 	swap = true;
5378       if (swap)
5379 	{
5380 	  if (dump_file && (dump_flags & TDF_DETAILS))
5381 	    {
5382 	      fprintf (dump_file, "Swap operands in stmt:\n");
5383 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5384 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5385 		       def0 ? lattice[gimple_uid (def0)] : 0,
5386 		       lattice[gimple_uid (def1)]);
5387 	    }
5388 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5389 			     gimple_assign_rhs2_ptr (stmt));
5390 	}
5391     }
5392   XDELETE (lattice);
5393 }
5394 
5395 /* Expand basic block BB from GIMPLE trees to RTL.  */
5396 
5397 static basic_block
5398 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5399 {
5400   gimple_stmt_iterator gsi;
5401   gimple_seq stmts;
5402   gimple *stmt = NULL;
5403   rtx_note *note;
5404   rtx_insn *last;
5405   edge e;
5406   edge_iterator ei;
5407 
5408   if (dump_file)
5409     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5410 	     bb->index);
5411 
5412   /* Note that since we are now transitioning from GIMPLE to RTL, we
5413      cannot use the gsi_*_bb() routines because they expect the basic
5414      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5415      access the BB sequence directly.  */
5416   if (optimize)
5417     reorder_operands (bb);
5418   stmts = bb_seq (bb);
5419   bb->il.gimple.seq = NULL;
5420   bb->il.gimple.phi_nodes = NULL;
5421   rtl_profile_for_bb (bb);
5422   init_rtl_bb_info (bb);
5423   bb->flags |= BB_RTL;
5424 
5425   /* Remove the RETURN_EXPR if we may fall through to the exit
5426      instead.  */
5427   gsi = gsi_last (stmts);
5428   if (!gsi_end_p (gsi)
5429       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5430     {
5431       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5432 
5433       gcc_assert (single_succ_p (bb));
5434       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5435 
5436       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5437 	  && !gimple_return_retval (ret_stmt))
5438 	{
5439 	  gsi_remove (&gsi, false);
5440 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5441 	}
5442     }
5443 
5444   gsi = gsi_start (stmts);
5445   if (!gsi_end_p (gsi))
5446     {
5447       stmt = gsi_stmt (gsi);
5448       if (gimple_code (stmt) != GIMPLE_LABEL)
5449 	stmt = NULL;
5450     }
5451 
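  /* A label for this block may already have been created, e.g. while
     expanding a jump to it from a block handled earlier; if so, emit it
     below together with any leading GIMPLE_LABEL statement.  */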
5452   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5453 
5454   if (stmt || elt)
5455     {
5456       last = get_last_insn ();
5457 
5458       if (stmt)
5459 	{
5460 	  expand_gimple_stmt (stmt);
5461 	  gsi_next (&gsi);
5462 	}
5463 
5464       if (elt)
5465 	emit_label (*elt);
5466 
5467       /* Java emits line number notes at the top of labels.
5468 	 ??? Make this go away once line number notes are obsoleted.  */
5469       BB_HEAD (bb) = NEXT_INSN (last);
5470       if (NOTE_P (BB_HEAD (bb)))
5471 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5472       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5473 
5474       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5475     }
5476   else
5477     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5478 
5479   NOTE_BASIC_BLOCK (note) = bb;
5480 
5481   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5482     {
5483       basic_block new_bb;
5484 
5485       stmt = gsi_stmt (gsi);
5486 
5487       /* If this statement is a non-debug one, and we generate debug
5488 	 insns, then this one might be the last real use of a TERed
5489 	 SSA_NAME, but where there are still some debug uses further
5490 	 down.  Expanding the current SSA name in such further debug
5491 	 uses by their RHS might lead to wrong debug info, as coalescing
5492 	 might make the operands of such RHS be placed into the same
5493 	 pseudo as something else.  Like so:
5494 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5495 	   use(a_1);
5496 	   a_2 = ...
5497            #DEBUG ... => a_1
5498 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5499 	 If we would now expand a_1 by its RHS (a_0 + 1) in the debug use,
5500 	 the write to a_2 would actually have clobbered the place which
5501 	 formerly held a_0.
5502 
5503 	 So, instead of that, we recognize the situation, and generate
5504 	 debug temporaries at the last real use of TERed SSA names:
5505 	   a_1 = a_0 + 1;
5506            #DEBUG #D1 => a_1
5507 	   use(a_1);
5508 	   a_2 = ...
5509            #DEBUG ... => #D1
5510 	 */
5511       if (MAY_HAVE_DEBUG_INSNS
5512 	  && SA.values
5513 	  && !is_gimple_debug (stmt))
5514 	{
5515 	  ssa_op_iter iter;
5516 	  tree op;
5517 	  gimple *def;
5518 
5519 	  location_t sloc = curr_insn_location ();
5520 
5521 	  /* Look for SSA names that have their last use here (TERed
5522 	     names always have only one real use).  */
5523 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5524 	    if ((def = get_gimple_for_ssa_name (op)))
5525 	      {
5526 		imm_use_iterator imm_iter;
5527 		use_operand_p use_p;
5528 		bool have_debug_uses = false;
5529 
5530 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5531 		  {
5532 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5533 		      {
5534 			have_debug_uses = true;
5535 			break;
5536 		      }
5537 		  }
5538 
5539 		if (have_debug_uses)
5540 		  {
5541 		    /* OP is a TERed SSA name, with DEF its defining
5542 		       statement, and where OP is used in further debug
5543 		       instructions.  Generate a debug temporary, and
5544 		       replace all uses of OP in debug insns with that
5545 		       temporary.  */
5546 		    gimple *debugstmt;
5547 		    tree value = gimple_assign_rhs_to_tree (def);
5548 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5549 		    rtx val;
5550 		    machine_mode mode;
5551 
5552 		    set_curr_insn_location (gimple_location (def));
5553 
5554 		    DECL_ARTIFICIAL (vexpr) = 1;
5555 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5556 		    if (DECL_P (value))
5557 		      mode = DECL_MODE (value);
5558 		    else
5559 		      mode = TYPE_MODE (TREE_TYPE (value));
5560 		    DECL_MODE (vexpr) = mode;
5561 
5562 		    val = gen_rtx_VAR_LOCATION
5563 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5564 
5565 		    emit_debug_insn (val);
5566 
5567 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5568 		      {
5569 			if (!gimple_debug_bind_p (debugstmt))
5570 			  continue;
5571 
5572 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5573 			  SET_USE (use_p, vexpr);
5574 
5575 			update_stmt (debugstmt);
5576 		      }
5577 		  }
5578 	      }
5579 	  set_curr_insn_location (sloc);
5580 	}
5581 
5582       currently_expanding_gimple_stmt = stmt;
5583 
5584       /* Expand this statement, then evaluate the resulting RTL and
5585 	 fixup the CFG accordingly.  */
5586       if (gimple_code (stmt) == GIMPLE_COND)
5587 	{
5588 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5589 	  if (new_bb)
5590 	    return new_bb;
5591 	}
5592       else if (gimple_debug_bind_p (stmt))
5593 	{
5594 	  location_t sloc = curr_insn_location ();
5595 	  gimple_stmt_iterator nsi = gsi;
5596 
5597 	  for (;;)
5598 	    {
5599 	      tree var = gimple_debug_bind_get_var (stmt);
5600 	      tree value;
5601 	      rtx val;
5602 	      machine_mode mode;
5603 
5604 	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5605 		  && TREE_CODE (var) != LABEL_DECL
5606 		  && !target_for_debug_bind (var))
5607 		goto delink_debug_stmt;
5608 
5609 	      if (gimple_debug_bind_has_value_p (stmt))
5610 		value = gimple_debug_bind_get_value (stmt);
5611 	      else
5612 		value = NULL_TREE;
5613 
5614 	      last = get_last_insn ();
5615 
5616 	      set_curr_insn_location (gimple_location (stmt));
5617 
5618 	      if (DECL_P (var))
5619 		mode = DECL_MODE (var);
5620 	      else
5621 		mode = TYPE_MODE (TREE_TYPE (var));
5622 
5623 	      val = gen_rtx_VAR_LOCATION
5624 		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5625 
5626 	      emit_debug_insn (val);
5627 
5628 	      if (dump_file && (dump_flags & TDF_DETAILS))
5629 		{
5630 		  /* We can't dump the insn with a TREE where an RTX
5631 		     is expected.  */
5632 		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5633 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5634 		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5635 		}
5636 
5637 	    delink_debug_stmt:
5638 	      /* In order not to generate too many debug temporaries,
5639 	         we delink all uses of debug statements we already expanded.
5640 		 Therefore debug statements between definition and real
5641 		 use of TERed SSA names will continue to use the SSA name,
5642 		 and not be replaced with debug temps.  */
5643 	      delink_stmt_imm_use (stmt);
5644 
5645 	      gsi = nsi;
5646 	      gsi_next (&nsi);
5647 	      if (gsi_end_p (nsi))
5648 		break;
5649 	      stmt = gsi_stmt (nsi);
5650 	      if (!gimple_debug_bind_p (stmt))
5651 		break;
5652 	    }
5653 
5654 	  set_curr_insn_location (sloc);
5655 	}
5656       else if (gimple_debug_source_bind_p (stmt))
5657 	{
5658 	  location_t sloc = curr_insn_location ();
5659 	  tree var = gimple_debug_source_bind_get_var (stmt);
5660 	  tree value = gimple_debug_source_bind_get_value (stmt);
5661 	  rtx val;
5662 	  machine_mode mode;
5663 
5664 	  last = get_last_insn ();
5665 
5666 	  set_curr_insn_location (gimple_location (stmt));
5667 
5668 	  mode = DECL_MODE (var);
5669 
5670 	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5671 				      VAR_INIT_STATUS_UNINITIALIZED);
5672 
5673 	  emit_debug_insn (val);
5674 
5675 	  if (dump_file && (dump_flags & TDF_DETAILS))
5676 	    {
5677 	      /* We can't dump the insn with a TREE where an RTX
5678 		 is expected.  */
5679 	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5680 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5681 	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5682 	    }
5683 
5684 	  set_curr_insn_location (sloc);
5685 	}
5686       else
5687 	{
5688 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5689 	  if (call_stmt
5690 	      && gimple_call_tail_p (call_stmt)
5691 	      && disable_tail_calls)
5692 	    gimple_call_set_tail (call_stmt, false);
5693 
5694 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5695 	    {
5696 	      bool can_fallthru;
5697 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5698 	      if (new_bb)
5699 		{
5700 		  if (can_fallthru)
5701 		    bb = new_bb;
5702 		  else
5703 		    return new_bb;
5704 		}
5705 	    }
5706 	  else
5707 	    {
5708 	      def_operand_p def_p;
5709 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5710 
5711 	      if (def_p != NULL)
5712 		{
5713 		  /* Ignore this stmt if it is in the list of
5714 		     replaceable expressions.  */
5715 		  if (SA.values
5716 		      && bitmap_bit_p (SA.values,
5717 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5718 		    continue;
5719 		}
5720 	      last = expand_gimple_stmt (stmt);
5721 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5722 	    }
5723 	}
5724     }
5725 
5726   currently_expanding_gimple_stmt = NULL;
5727 
5728   /* Expand implicit goto and convert goto_locus.  */
5729   FOR_EACH_EDGE (e, ei, bb->succs)
5730     {
5731       if (e->goto_locus != UNKNOWN_LOCATION)
5732 	set_curr_insn_location (e->goto_locus);
5733       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5734 	{
5735 	  emit_jump (label_rtx_for_bb (e->dest));
5736 	  e->flags &= ~EDGE_FALLTHRU;
5737 	}
5738     }
5739 
5740   /* Expanded RTL can create a jump in the last instruction of the block.
5741      Such a jump might later be assumed to be a jump to the successor and
5742      break edge insertion.  Insert a dummy move to prevent this.  PR41440.  */
5743   if (single_succ_p (bb)
5744       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5745       && (last = get_last_insn ())
5746       && JUMP_P (last))
5747     {
5748       rtx dummy = gen_reg_rtx (SImode);
5749       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5750     }
5751 
5752   do_pending_stack_adjust ();
5753 
5754   /* Find the block tail.  The last insn in the block is the insn
5755      before a barrier and/or table jump insn.  */
5756   last = get_last_insn ();
5757   if (BARRIER_P (last))
5758     last = PREV_INSN (last);
5759   if (JUMP_TABLE_DATA_P (last))
5760     last = PREV_INSN (PREV_INSN (last));
5761   BB_END (bb) = last;
5762 
5763   update_bb_for_insn (bb);
5764 
5765   return bb;
5766 }
5767 
5768 
5769 /* Create a basic block for initialization code.  */
5770 
5771 static basic_block
5772 construct_init_block (void)
5773 {
5774   basic_block init_block, first_block;
5775   edge e = NULL;
5776   int flags;
5777 
5778   /* Multiple entry points not supported yet.  */
5779   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5780   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5781   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5782   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5783   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5784 
5785   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5786 
5787   /* When the entry edge points to the first basic block, we don't need
5788      a jump; otherwise we have to jump to the proper target.  */
5789   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5790     {
5791       tree label = gimple_block_label (e->dest);
5792 
5793       emit_jump (jump_target_rtx (label));
5794       flags = 0;
5795     }
5796   else
5797     flags = EDGE_FALLTHRU;
5798 
5799   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5800 				   get_last_insn (),
5801 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5802   init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5803   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5804   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5805   if (e)
5806     {
5807       first_block = e->dest;
5808       redirect_edge_succ (e, init_block);
5809       e = make_edge (init_block, first_block, flags);
5810     }
5811   else
5812     e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5813   e->probability = REG_BR_PROB_BASE;
5814   e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5815 
5816   update_bb_for_insn (init_block);
5817   return init_block;
5818 }
5819 
5820 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5821    found in the block tree.  */
5822 
5823 static void
5824 set_block_levels (tree block, int level)
5825 {
5826   while (block)
5827     {
5828       BLOCK_NUMBER (block) = level;
5829       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5830       block = BLOCK_CHAIN (block);
5831     }
5832 }
5833 
5834 /* Create a block containing landing pads and similar stuff.  */
5835 
5836 static void
5837 construct_exit_block (void)
5838 {
5839   rtx_insn *head = get_last_insn ();
5840   rtx_insn *end;
5841   basic_block exit_block;
5842   edge e, e2;
5843   unsigned ix;
5844   edge_iterator ei;
5845   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5846   rtx_insn *orig_end = BB_END (prev_bb);
5847 
5848   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5849 
5850   /* Make sure the locus is set to the end of the function, so that
5851      epilogue line numbers and warnings are set properly.  */
5852   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5853     input_location = cfun->function_end_locus;
5854 
5855   /* Generate rtl for function exit.  */
5856   expand_function_end ();
5857 
5858   end = get_last_insn ();
5859   if (head == end)
5860     return;
5861   /* While emitting the function end we could have moved the end of the
5862      last basic block.  */
5863   BB_END (prev_bb) = orig_end;
5864   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5865     head = NEXT_INSN (head);
5866   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5867      bb frequency counting will be confused.  Any instructions before that
5868      label are emitted for the case where PREV_BB falls through into the
5869      exit block, so append those instructions to prev_bb in that case.  */
5870   if (NEXT_INSN (head) != return_label)
5871     {
5872       while (NEXT_INSN (head) != return_label)
5873 	{
5874 	  if (!NOTE_P (NEXT_INSN (head)))
5875 	    BB_END (prev_bb) = NEXT_INSN (head);
5876 	  head = NEXT_INSN (head);
5877 	}
5878     }
5879   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5880   exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5881   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5882   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5883 
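  /* Redirect the non-abnormal predecessors of the exit block to the new
     exit_block; abnormal edges have to stay where they are.  */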
5884   ix = 0;
5885   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5886     {
5887       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5888       if (!(e->flags & EDGE_ABNORMAL))
5889 	redirect_edge_succ (e, exit_block);
5890       else
5891 	ix++;
5892     }
5893 
5894   e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5895   e->probability = REG_BR_PROB_BASE;
5896   e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
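  /* The remaining (abnormal) predecessor edges keep their counts; subtract
     them so that the new fallthru edge and exit_block carry only what is
     left over.  */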
5897   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5898     if (e2 != e)
5899       {
5900 	e->count -= e2->count;
5901 	exit_block->count -= e2->count;
5902 	exit_block->frequency -= EDGE_FREQUENCY (e2);
5903       }
5904   if (e->count < 0)
5905     e->count = 0;
5906   if (exit_block->count < 0)
5907     exit_block->count = 0;
5908   if (exit_block->frequency < 0)
5909     exit_block->frequency = 0;
5910   update_bb_for_insn (exit_block);
5911 }
5912 
5913 /* Helper function for discover_nonconstant_array_refs.
5914    Look for ARRAY_REF nodes with non-constant indexes and mark them
5915    addressable.  */
5916 
5917 static tree
5918 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5919 				   void *data ATTRIBUTE_UNUSED)
5920 {
5921   tree t = *tp;
5922 
5923   if (IS_TYPE_OR_DECL_P (t))
5924     *walk_subtrees = 0;
5925   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5926     {
5927       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5928 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5929 	      && (!TREE_OPERAND (t, 2)
5930 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5931 	     || (TREE_CODE (t) == COMPONENT_REF
5932 		 && (!TREE_OPERAND (t,2)
5933 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5934 	     || TREE_CODE (t) == BIT_FIELD_REF
5935 	     || TREE_CODE (t) == REALPART_EXPR
5936 	     || TREE_CODE (t) == IMAGPART_EXPR
5937 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
5938 	     || CONVERT_EXPR_P (t))
5939 	t = TREE_OPERAND (t, 0);
5940 
5941       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5942 	{
5943 	  t = get_base_address (t);
5944 	  if (t && DECL_P (t)
5945               && DECL_MODE (t) != BLKmode)
5946 	    TREE_ADDRESSABLE (t) = 1;
5947 	}
5948 
5949       *walk_subtrees = 0;
5950     }
5951 
5952   return NULL_TREE;
5953 }
5954 
5955 /* RTL expansion is not able to compile array references with variable
5956    offsets for arrays stored in a single register.  Discover such
5957    expressions and mark the variables as addressable to avoid this
5958    scenario.  */
5959 
5960 static void
5961 discover_nonconstant_array_refs (void)
5962 {
5963   basic_block bb;
5964   gimple_stmt_iterator gsi;
5965 
5966   FOR_EACH_BB_FN (bb, cfun)
5967     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5968       {
5969 	gimple *stmt = gsi_stmt (gsi);
5970 	if (!is_gimple_debug (stmt))
5971 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5972       }
5973 }
5974 
5975 /* This function sets crtl->args.internal_arg_pointer to a virtual
5976    register if DRAP is needed.  Local register allocator will replace
5977    virtual_incoming_args_rtx with the virtual register.  */
5978 
5979 static void
5980 expand_stack_alignment (void)
5981 {
5982   rtx drap_rtx;
5983   unsigned int preferred_stack_boundary;
5984 
5985   if (! SUPPORTS_STACK_ALIGNMENT)
5986     return;
5987 
5988   if (cfun->calls_alloca
5989       || cfun->has_nonlocal_label
5990       || crtl->has_nonlocal_goto)
5991     crtl->need_drap = true;
5992 
5993   /* Call update_stack_boundary here again to update incoming stack
5994      boundary.  It may set incoming stack alignment to a different
5995      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
5996      use the minimum incoming stack alignment to check if it is OK
5997      to perform sibcall optimization since sibcall optimization will
5998      only align the outgoing stack to incoming stack boundary.  */
5999   if (targetm.calls.update_stack_boundary)
6000     targetm.calls.update_stack_boundary ();
6001 
6002   /* The incoming stack frame has to be aligned at least at
6003      parm_stack_boundary.  */
6004   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6005 
6006   /* Update crtl->stack_alignment_estimated and use it later to align
6007      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6008      exceptions since callgraph doesn't collect incoming stack alignment
6009      in this case.  */
6010   if (cfun->can_throw_non_call_exceptions
6011       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6012     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6013   else
6014     preferred_stack_boundary = crtl->preferred_stack_boundary;
6015   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6016     crtl->stack_alignment_estimated = preferred_stack_boundary;
6017   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6018     crtl->stack_alignment_needed = preferred_stack_boundary;
6019 
6020   gcc_assert (crtl->stack_alignment_needed
6021 	      <= crtl->stack_alignment_estimated);
6022 
6023   crtl->stack_realign_needed
6024     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6025   crtl->stack_realign_tried = crtl->stack_realign_needed;
6026 
6027   crtl->stack_realign_processed = true;
6028 
6029   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6030      alignment.  */
6031   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6032   drap_rtx = targetm.calls.get_drap_rtx ();
6033 
6034   /* stack_realign_drap and drap_rtx must match.  */
6035   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6036 
6037   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6038   if (NULL != drap_rtx)
6039     {
6040       crtl->args.internal_arg_pointer = drap_rtx;
6041 
6042       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6043          needed. */
6044       fixup_tail_calls ();
6045     }
6046 }
6047 
6048 
6049 static void
6050 expand_main_function (void)
6051 {
6052 #if (defined(INVOKE__main)				\
6053      || (!defined(HAS_INIT_SECTION)			\
6054 	 && !defined(INIT_SECTION_ASM_OP)		\
6055 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6056   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6057 #endif
6058 }
6059 
6060 
6061 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6062    the beginning of a function to be protected.  */
6063 
6064 static void
6065 stack_protect_prologue (void)
6066 {
6067   tree guard_decl = targetm.stack_protect_guard ();
6068   rtx x, y;
6069 
6070   x = expand_normal (crtl->stack_protect_guard);
6071   y = expand_normal (guard_decl);
6072 
6073   /* Allow the target to copy from Y to X without leaking Y into a
6074      register.  */
6075   if (targetm.have_stack_protect_set ())
6076     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6077       {
6078 	emit_insn (insn);
6079 	return;
6080       }
6081 
6082   /* Otherwise do a straight move.  */
6083   emit_move_insn (x, y);
6084 }
6085 
6086 /* Translate the intermediate representation contained in the CFG
6087    from GIMPLE trees to RTL.
6088 
6089    We do conversion per basic block and preserve/update the tree CFG.
6090    This implies we have to do some magic as the CFG can simultaneously
6091    consist of basic blocks containing RTL and GIMPLE trees.  This can
6092    confuse the CFG hooks, so be careful to not manipulate CFG during
6093    the expansion.  */
6094 
6095 namespace {
6096 
6097 const pass_data pass_data_expand =
6098 {
6099   RTL_PASS, /* type */
6100   "expand", /* name */
6101   OPTGROUP_NONE, /* optinfo_flags */
6102   TV_EXPAND, /* tv_id */
6103   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6104     | PROP_gimple_lcx
6105     | PROP_gimple_lvec
6106     | PROP_gimple_lva), /* properties_required */
6107   PROP_rtl, /* properties_provided */
6108   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6109   0, /* todo_flags_start */
6110   0, /* todo_flags_finish */
6111 };
6112 
6113 class pass_expand : public rtl_opt_pass
6114 {
6115 public:
6116   pass_expand (gcc::context *ctxt)
6117     : rtl_opt_pass (pass_data_expand, ctxt)
6118   {}
6119 
6120   /* opt_pass methods: */
6121   virtual unsigned int execute (function *);
6122 
6123 }; // class pass_expand
6124 
6125 unsigned int
6126 pass_expand::execute (function *fun)
6127 {
6128   basic_block bb, init_block;
6129   sbitmap blocks;
6130   edge_iterator ei;
6131   edge e;
6132   rtx_insn *var_seq, *var_ret_seq;
6133   unsigned i;
6134 
6135   timevar_push (TV_OUT_OF_SSA);
6136   rewrite_out_of_ssa (&SA);
6137   timevar_pop (TV_OUT_OF_SSA);
6138   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6139 
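  /* With TER, the value of a debug bind may reference chains of replaceable
     expressions of arbitrary depth; avoid_deep_ter_for_debug limits that
     depth so that expanding the binds later stays manageable.  */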
6140   if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
6141     {
6142       gimple_stmt_iterator gsi;
6143       FOR_EACH_BB_FN (bb, cfun)
6144 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6145 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6146 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6147     }
6148 
6149   /* Make sure all values used by the optimization passes have sane
6150      defaults.  */
6151   reg_renumber = 0;
6152 
6153   /* Some backends want to know that we are expanding to RTL.  */
6154   currently_expanding_to_rtl = 1;
6155   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6156   free_dominance_info (CDI_DOMINATORS);
6157 
6158   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6159 
6160   if (chkp_function_instrumented_p (current_function_decl))
6161     chkp_reset_rtl_bounds ();
6162 
6163   insn_locations_init ();
6164   if (!DECL_IS_BUILTIN (current_function_decl))
6165     {
6166       /* Eventually, all FEs should explicitly set function_start_locus.  */
6167       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6168 	set_curr_insn_location
6169 	  (DECL_SOURCE_LOCATION (current_function_decl));
6170       else
6171 	set_curr_insn_location (fun->function_start_locus);
6172     }
6173   else
6174     set_curr_insn_location (UNKNOWN_LOCATION);
6175   prologue_location = curr_insn_location ();
6176 
6177 #ifdef INSN_SCHEDULING
6178   init_sched_attrs ();
6179 #endif
6180 
6181   /* Make sure first insn is a note even if we don't want linenums.
6182      This makes sure the first insn will never be deleted.
6183      Also, final expects a note to appear there.  */
6184   emit_note (NOTE_INSN_DELETED);
6185 
6186   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6187   discover_nonconstant_array_refs ();
6188 
6189   targetm.expand_to_rtl_hook ();
6190   crtl->stack_alignment_needed = STACK_BOUNDARY;
6191   crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
6192   crtl->stack_alignment_estimated = 0;
6193   crtl->preferred_stack_boundary = STACK_BOUNDARY;
6194   fun->cfg->max_jumptable_ents = 0;
6195 
6196   /* Resolve the function section.  Some targets, like ARM EABI, rely on
6197      knowledge of the function section at expansion time to predict the distance of calls.  */
6198   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6199 
6200   /* Expand the variables recorded during gimple lowering.  */
6201   timevar_push (TV_VAR_EXPAND);
6202   start_sequence ();
6203 
6204   var_ret_seq = expand_used_vars ();
6205 
6206   var_seq = get_insns ();
6207   end_sequence ();
6208   timevar_pop (TV_VAR_EXPAND);
6209 
6210   /* Honor stack protection warnings.  */
6211   if (warn_stack_protect)
6212     {
6213       if (fun->calls_alloca)
6214 	warning (OPT_Wstack_protector,
6215 		 "stack protector not protecting local variables: "
6216 		 "variable length buffer");
6217       if (has_short_buffer && !crtl->stack_protect_guard)
6218 	warning (OPT_Wstack_protector,
6219 		 "stack protector not protecting function: "
6220 		 "all local arrays are less than %d bytes long",
6221 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6222     }
6223 
6224   /* Set up parameters and prepare for return, for the function.  */
6225   expand_function_start (current_function_decl);
6226 
6227   /* If we emitted any instructions for setting up the variables,
6228      emit them before the FUNCTION_START note.  */
6229   if (var_seq)
6230     {
6231       emit_insn_before (var_seq, parm_birth_insn);
6232 
6233       /* In expand_function_end we'll insert the alloca save/restore
6234 	 before parm_birth_insn.  We've just inserted an alloca call.
6235 	 Adjust the pointer to match.  */
6236       parm_birth_insn = var_seq;
6237     }
6238 
6239   /* Now propagate the RTL assignment of each partition to the
6240      underlying var of each SSA_NAME.  */
6241   for (i = 1; i < num_ssa_names; i++)
6242     {
6243       tree name = ssa_name (i);
6244 
6245       if (!name
6246 	  /* We might have generated new SSA names in
6247 	     update_alias_info_with_stack_vars.  They will have a NULL
6248 	     defining statement, and won't be part of the partitioning,
6249 	     so ignore those.  */
6250 	  || !SSA_NAME_DEF_STMT (name))
6251 	continue;
6252 
6253       adjust_one_expanded_partition_var (name);
6254     }
6255 
6256   /* Clean up RTL of variables that straddle across multiple
6257      partitions, and check that the rtl of any PARM_DECLs that are not
6258      cleaned up is that of their default defs.  */
6259   for (i = 1; i < num_ssa_names; i++)
6260     {
6261       tree name = ssa_name (i);
6262       int part;
6263 
6264       if (!name
6265 	  /* We might have generated new SSA names in
6266 	     update_alias_info_with_stack_vars.  They will have a NULL
6267 	     defining statement, and won't be part of the partitioning,
6268 	     so ignore those.  */
6269 	  || !SSA_NAME_DEF_STMT (name))
6270 	continue;
6271       part = var_to_partition (SA.map, name);
6272       if (part == NO_PARTITION)
6273 	continue;
6274 
6275       /* If this decl was marked as living in multiple places, reset
6276 	 this now to NULL.  */
6277       tree var = SSA_NAME_VAR (name);
6278       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6279 	SET_DECL_RTL (var, NULL);
6280       /* Check that the pseudos chosen by assign_parms are those of
6281 	 the corresponding default defs.  */
6282       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6283 	       && (TREE_CODE (var) == PARM_DECL
6284 		   || TREE_CODE (var) == RESULT_DECL))
6285 	{
6286 	  rtx in = DECL_RTL_IF_SET (var);
6287 	  gcc_assert (in);
6288 	  rtx out = SA.partition_to_pseudo[part];
6289 	  gcc_assert (in == out);
6290 
6291 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6292 	     those expected by debug backends for each parm and for
6293 	     the result.  This is particularly important for stabs,
6294 	     whose register elimination from parm's DECL_RTL may cause
6295 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6296 	     attrs.  So, make sure the RTL already has the parm as the
6297 	     EXPR, so that it won't change.  */
6298 	  SET_DECL_RTL (var, NULL_RTX);
6299 	  if (MEM_P (in))
6300 	    set_mem_attributes (in, var, true);
6301 	  SET_DECL_RTL (var, in);
6302 	}
6303     }
6304 
6305   /* If this function is `main', emit a call to `__main'
6306      to run global initializers, etc.  */
6307   if (DECL_NAME (current_function_decl)
6308       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6309       && DECL_FILE_SCOPE_P (current_function_decl))
6310     expand_main_function ();
6311 
6312   /* Initialize the stack_protect_guard field.  This must happen after the
6313      call to __main (if any) so that the external decl is initialized.  */
6314   if (crtl->stack_protect_guard)
6315     stack_protect_prologue ();
6316 
6317   expand_phi_nodes (&SA);
6318 
6319   /* Release any stale SSA redirection data.  */
6320   redirect_edge_var_map_empty ();
6321 
6322   /* Register rtl specific functions for cfg.  */
6323   rtl_register_cfg_hooks ();
6324 
6325   init_block = construct_init_block ();
6326 
6327   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6328      remaining edges later.  */
6329   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6330     e->flags &= ~EDGE_EXECUTABLE;
6331 
6332   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6333   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6334 		  next_bb)
6335     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6336 
6337   if (MAY_HAVE_DEBUG_INSNS)
6338     expand_debug_locations ();
6339 
6340   if (deep_ter_debug_map)
6341     {
6342       delete deep_ter_debug_map;
6343       deep_ter_debug_map = NULL;
6344     }
6345 
6346   /* Free stuff we no longer need after GIMPLE optimizations.  */
6347   free_dominance_info (CDI_DOMINATORS);
6348   free_dominance_info (CDI_POST_DOMINATORS);
6349   delete_tree_cfg_annotations (fun);
6350 
6351   timevar_push (TV_OUT_OF_SSA);
6352   finish_out_of_ssa (&SA);
6353   timevar_pop (TV_OUT_OF_SSA);
6354 
6355   timevar_push (TV_POST_EXPAND);
6356   /* We are no longer in SSA form.  */
6357   fun->gimple_df->in_ssa_p = false;
6358   loops_state_clear (LOOP_CLOSED_SSA);
6359 
6360   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6361      conservatively to true until they are all profile aware.  */
6362   delete lab_rtx_for_bb;
6363   free_histograms (fun);
6364 
6365   construct_exit_block ();
6366   insn_locations_finalize ();
6367 
6368   if (var_ret_seq)
6369     {
6370       rtx_insn *after = return_label;
6371       rtx_insn *next = NEXT_INSN (after);
6372       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6373 	after = next;
6374       emit_insn_after (var_ret_seq, after);
6375     }
6376 
6377   /* Zap the tree EH table.  */
6378   set_eh_throw_stmt_table (fun, NULL);
6379 
6380   /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6381      split edges which edge insertions might do.  */
6382   rebuild_jump_labels (get_insns ());
6383 
6384   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6385 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6386     {
6387       edge e;
6388       edge_iterator ei;
6389       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6390 	{
6391 	  if (e->insns.r)
6392 	    {
6393 	      rebuild_jump_labels_chain (e->insns.r);
6394 	      /* Put insns after parm birth, but before
6395 		 NOTE_INSNS_FUNCTION_BEG.  */
6396 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6397 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6398 		{
6399 		  rtx_insn *insns = e->insns.r;
6400 		  e->insns.r = NULL;
6401 		  if (NOTE_P (parm_birth_insn)
6402 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6403 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6404 		  else
6405 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6406 		}
6407 	      else
6408 		commit_one_edge_insertion (e);
6409 	    }
6410 	  else
6411 	    ei_next (&ei);
6412 	}
6413     }
6414 
6415   /* We're done expanding trees to RTL.  */
6416   currently_expanding_to_rtl = 0;
6417 
6418   flush_mark_addressable_queue ();
6419 
6420   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6421 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6422     {
6423       edge e;
6424       edge_iterator ei;
6425       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6426 	{
6427 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6428 	  e->flags &= ~EDGE_EXECUTABLE;
6429 
6430 	  /* At the moment not all abnormal edges match the RTL
6431 	     representation.  It is safe to remove them here as
6432 	     find_many_sub_basic_blocks will rediscover them.
6433 	     In the future we should get this fixed properly.  */
6434 	  if ((e->flags & EDGE_ABNORMAL)
6435 	      && !(e->flags & EDGE_SIBCALL))
6436 	    remove_edge (e);
6437 	  else
6438 	    ei_next (&ei);
6439 	}
6440     }
6441 
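  /* Expansion may have emitted new jumps and labels in the middle of what
     used to be single GIMPLE blocks; re-split every block so that the RTL
     CFG matches the emitted insn stream.  */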
6442   blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6443   bitmap_ones (blocks);
6444   find_many_sub_basic_blocks (blocks);
6445   sbitmap_free (blocks);
6446   purge_all_dead_edges ();
6447 
6448   expand_stack_alignment ();
6449 
6450   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6451      function.  */
6452   if (crtl->tail_call_emit)
6453     fixup_tail_calls ();
6454 
6455   /* After initial rtl generation, call back to finish generating
6456      exception support code.  We need to do this before cleaning up
6457      the CFG as the code does not expect dead landing pads.  */
6458   if (fun->eh->region_tree != NULL)
6459     finish_eh_generation ();
6460 
6461   /* Remove unreachable blocks, otherwise we cannot compute dominators
6462      which are needed for loop state verification.  As a side-effect
6463      this also compacts blocks.
6464      ???  We cannot remove trivially dead insns here as for example
6465      the DRAP reg on i?86 is not magically live at this point.
6466      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6467   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6468 
6469   checking_verify_flow_info ();
6470 
6471   /* Initialize pseudos allocated for hard registers.  */
6472   emit_initial_value_sets ();
6473 
6474   /* And finally unshare all RTL.  */
6475   unshare_all_rtl ();
6476 
6477   /* There's no need to defer outputting this function any more; we
6478      know we want to output it.  */
6479   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6480 
6481   /* Now that we're done expanding trees to RTL, we shouldn't have any
6482      more CONCATs anywhere.  */
6483   generating_concat_p = 0;
6484 
6485   if (dump_file)
6486     {
6487       fprintf (dump_file,
6488 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6489       /* And the pass manager will dump RTL for us.  */
6490     }
6491 
6492   /* If we're emitting a nested function, make sure its parent gets
6493      emitted as well.  Doing otherwise confuses debug info.  */
6494     {
6495       tree parent;
6496       for (parent = DECL_CONTEXT (current_function_decl);
6497 	   parent != NULL_TREE;
6498 	   parent = get_containing_scope (parent))
6499 	if (TREE_CODE (parent) == FUNCTION_DECL)
6500 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6501     }
6502 
6503   /* We are now committed to emitting code for this function.  Do any
6504      preparation, such as emitting abstract debug info for the inline
6505      before it gets mangled by optimization.  */
6506   if (cgraph_function_possibly_inlined_p (current_function_decl))
6507     (*debug_hooks->outlining_inline_function) (current_function_decl);
6508 
6509   TREE_ASM_WRITTEN (current_function_decl) = 1;
6510 
6511   /* After expanding, the return labels are no longer needed. */
6512   return_label = NULL;
6513   naked_return_label = NULL;
6514 
6515   /* After expanding, the tm_restart map is no longer needed.  */
6516   if (fun->gimple_df->tm_restart)
6517     fun->gimple_df->tm_restart = NULL;
6518 
6519   /* Tag the blocks with a depth number so that change_scope can find
6520      the common parent easily.  */
6521   set_block_levels (DECL_INITIAL (fun->decl), 0);
6522   default_rtl_profile ();
6523 
6524   timevar_pop (TV_POST_EXPAND);
6525 
6526   return 0;
6527 }
6528 
6529 } // anon namespace
6530 
6531 rtl_opt_pass *
6532 make_pass_expand (gcc::context *ctxt)
6533 {
6534   return new pass_expand (ctxt);
6535 }
6536