xref: /dragonfly/contrib/gcc-8.0/gcc/cfgexpand.c (revision 3851e4b8)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79 
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82    give the same symbol without quotes for an alternative entry point.  You
83    must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* This variable holds information helping the rewriting of SSA trees
89    into RTL.  */
90 struct ssaexpand SA;
91 
92 /* This variable holds the currently expanded gimple statement for purposes
93    of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
95 
96 static rtx expand_debug_expr (tree);
97 
98 static bool defer_stack_allocation (tree, bool);
99 
100 static void record_alignment_for_reg_var (unsigned int);
101 
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103    statement STMT.  */
104 
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108   tree t;
109   enum gimple_rhs_class grhs_class;
110 
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112 
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115 		TREE_TYPE (gimple_assign_lhs (stmt)),
116 		gimple_assign_rhs1 (stmt),
117 		gimple_assign_rhs2 (stmt),
118 		gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121 		TREE_TYPE (gimple_assign_lhs (stmt)),
122 		gimple_assign_rhs1 (stmt),
123 		gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126 		TREE_TYPE (gimple_assign_lhs (stmt)),
127 		gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
129     {
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 	   && gimple_location (stmt) != EXPR_LOCATION (t))
134 	  || (gimple_block (stmt)
135 	      && currently_expanding_to_rtl
136 	      && EXPR_P (t)))
137 	t = copy_node (t);
138     }
139   else
140     gcc_unreachable ();
141 
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
144 
145   return t;
146 }
147 
148 
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152 
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
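
/* As a rough illustration (using a hypothetical SSA name i_3 for a user
   variable i): SSAVAR (i_3) yields the underlying VAR_DECL for i, while for
   a plain VAR_DECL or PARM_DECL the macro is simply the identity.  */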
154 
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156    Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
157    arising from the same user variable being in multiple partitions (this is
158    less likely for compiler-introduced temps).  */
159 
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163   if (cur == NULL || cur == next)
164     return next;
165 
166   if (DECL_P (cur) && DECL_IGNORED_P (cur))
167     return cur;
168 
169   if (DECL_P (next) && DECL_IGNORED_P (next))
170     return next;
171 
172   return cur;
173 }
174 
175 /* Associate declaration T with storage space X.  If T is not an
176    SSA name this is exactly SET_DECL_RTL, otherwise make the
177    partition of T associated with X.  */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181   gcc_checking_assert (!x
182 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 		       || (use_register_for_decl (t)
184 			   ? (REG_P (x)
185 			      || (GET_CODE (x) == CONCAT
186 				  && (REG_P (XEXP (x, 0))
187 				      || SUBREG_P (XEXP (x, 0)))
188 				  && (REG_P (XEXP (x, 1))
189 				      || SUBREG_P (XEXP (x, 1))))
190 			      /* We need to accept PARALLELs for RESULT_DECLs
191 				 because of vector types with BLKmode returned
192 				 in multiple registers, but they are supposed
193 				 to be uncoalesced.  */
194 			      || (GET_CODE (x) == PARALLEL
195 				  && SSAVAR (t)
196 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 				  && (GET_MODE (x) == BLKmode
198 				      || !flag_tree_coalesce_vars)))
199 			   : (MEM_P (x) || x == pc_rtx
200 			      || (GET_CODE (x) == CONCAT
201 				  && MEM_P (XEXP (x, 0))
202 				  && MEM_P (XEXP (x, 1))))));
203   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204      RESULT_DECLs has the expected mode.  For memory, we accept
205      unpromoted modes, since that's what we're likely to get.  For
206      PARM_DECLs and RESULT_DECLs, we'll have been called by
207      set_parm_rtl, which will give us the default def, so we don't
208      have to compute it ourselves.  For RESULT_DECLs, we accept mode
209      mismatches too, as long as we have BLKmode or are not coalescing
210      across variables, so that we don't reject BLKmode PARALLELs or
211      unpromoted REGs.  */
212   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 		       || (SSAVAR (t)
214 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 			   && (promote_ssa_mode (t, NULL) == BLKmode
216 			       || !flag_tree_coalesce_vars))
217 		       || !use_register_for_decl (t)
218 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
219 
220   if (x)
221     {
222       bool skip = false;
223       tree cur = NULL_TREE;
224       rtx xm = x;
225 
226     retry:
227       if (MEM_P (xm))
228 	cur = MEM_EXPR (xm);
229       else if (REG_P (xm))
230 	cur = REG_EXPR (xm);
231       else if (SUBREG_P (xm))
232 	{
233 	  gcc_assert (subreg_lowpart_p (xm));
234 	  xm = SUBREG_REG (xm);
235 	  goto retry;
236 	}
237       else if (GET_CODE (xm) == CONCAT)
238 	{
239 	  xm = XEXP (xm, 0);
240 	  goto retry;
241 	}
242       else if (GET_CODE (xm) == PARALLEL)
243 	{
244 	  xm = XVECEXP (xm, 0, 0);
245 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 	  xm = XEXP (xm, 0);
247 	  goto retry;
248 	}
249       else if (xm == pc_rtx)
250 	skip = true;
251       else
252 	gcc_unreachable ();
253 
254       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255 
256       if (cur != next)
257 	{
258 	  if (MEM_P (x))
259 	    set_mem_attributes (x,
260 				next && TREE_CODE (next) == SSA_NAME
261 				? TREE_TYPE (next)
262 				: next, true);
263 	  else
264 	    set_reg_attrs_for_decl_rtl (next, x);
265 	}
266     }
267 
268   if (TREE_CODE (t) == SSA_NAME)
269     {
270       int part = var_to_partition (SA.map, t);
271       if (part != NO_PARTITION)
272 	{
273 	  if (SA.partition_to_pseudo[part])
274 	    gcc_assert (SA.partition_to_pseudo[part] == x);
275 	  else if (x != pc_rtx)
276 	    SA.partition_to_pseudo[part] = x;
277 	}
278       /* For the benefit of debug information at -O0 (where
279          vartracking doesn't run) record the place also in the base
280          DECL.  For PARMs and RESULTs, do so only when setting the
281          default def.  */
282       if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 	  && (VAR_P (SSA_NAME_VAR (t))
284 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
285 	{
286 	  tree var = SSA_NAME_VAR (t);
287 	  /* If we don't yet have something recorded, just record it now.  */
288 	  if (!DECL_RTL_SET_P (var))
289 	    SET_DECL_RTL (var, x);
290 	  /* If we have it set already to "multiple places" don't
291 	     change this.  */
292 	  else if (DECL_RTL (var) == pc_rtx)
293 	    ;
294 	  /* If we have something recorded and it's not the same place
295 	     as we want to record now, we have multiple partitions for the
296 	     same base variable, with different places.  We can't just
297 	     randomly choose one, hence we have to say that we don't know.
298 	     This only happens with optimization, and there var-tracking
299 	     will figure out the right thing.  */
300 	  else if (DECL_RTL (var) != x)
301 	    SET_DECL_RTL (var, pc_rtx);
302 	}
303     }
304   else
305     SET_DECL_RTL (t, x);
306 }
307 
308 /* This structure holds data relevant to one variable that will be
309    placed in a stack slot.  */
310 struct stack_var
311 {
312   /* The variable.  */
313   tree decl;
314 
315   /* Initially, the size of the variable.  Later, the size of the partition,
316      if this variable becomes its partition's representative.  */
317   poly_uint64 size;
318 
319   /* The *byte* alignment required for this variable.  Or, as with the
320      size, the alignment for this partition.  */
321   unsigned int alignb;
322 
323   /* The partition representative.  */
324   size_t representative;
325 
326   /* The next stack variable in the partition, or EOC.  */
327   size_t next;
328 
329   /* The numbers of conflicting stack variables.  */
330   bitmap conflicts;
331 };
332 
333 #define EOC  ((size_t)-1)
334 
335 /* We have an array of such objects while deciding allocation.  */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
340 
341 /* Conflict bitmaps go on this obstack.  This allows us to destroy
342    all of them in one big sweep.  */
343 static bitmap_obstack stack_var_bitmap_obstack;
344 
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346    is non-decreasing.  */
347 static size_t *stack_vars_sorted;
348 
349 /* The phase of the stack frame.  This is the known misalignment of
350    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
351    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
352 static int frame_phase;
353 
354 /* Used during expand_used_vars to remember if we saw any decls for
355    which we'd like to enable stack smashing protection.  */
356 static bool has_protected_decls;
357 
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
360 static bool has_short_buffer;
361 
362 /* Compute the byte alignment to use for DECL.  Ignore alignment
363    we cannot honor given the expected alignment of the stack boundary.  */
364 
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368   unsigned int align;
369 
370   if (TREE_CODE (decl) == SSA_NAME)
371     align = TYPE_ALIGN (TREE_TYPE (decl));
372   else
373     {
374       align = LOCAL_DECL_ALIGNMENT (decl);
375       SET_DECL_ALIGN (decl, align);
376     }
377   return align / BITS_PER_UNIT;
378 }
379 
380 /* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
381    round down otherwise.  Return the aligned BASE value.  */
382 
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
386   return align_up ? (base + align - 1) & -align : base & -align;
387 }
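
/* A worked example of the arithmetic above, assuming an 8-byte ALIGN
   (so -align == ~7 in two's complement):

     align_base (13, 8, true)  == (13 + 7) & ~7 == 16   (round up)
     align_base (13, 8, false) == 13 & ~7       == 8    (round down)  */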
388 
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390    Return the frame offset.  */
391 
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
394 {
395   poly_int64 offset, new_frame_offset;
396 
397   if (FRAME_GROWS_DOWNWARD)
398     {
399       new_frame_offset
400 	= aligned_lower_bound (frame_offset - frame_phase - size,
401 			       align) + frame_phase;
402       offset = new_frame_offset;
403     }
404   else
405     {
406       new_frame_offset
407 	= aligned_upper_bound (frame_offset - frame_phase,
408 			       align) + frame_phase;
409       offset = new_frame_offset;
410       new_frame_offset += size;
411     }
412   frame_offset = new_frame_offset;
413 
414   if (frame_offset_overflow (frame_offset, cfun->decl))
415     frame_offset = offset = 0;
416 
417   return offset;
418 }
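
/* A sketch of the bookkeeping above, assuming frame_phase == 0, an 8-byte
   ALIGN and a 24-byte SIZE:

     FRAME_GROWS_DOWNWARD, frame_offset == -16:
       new_frame_offset = aligned_lower_bound (-16 - 24, 8) = -40
       returned offset  = -40; frame_offset becomes -40

     !FRAME_GROWS_DOWNWARD, frame_offset == 16:
       new_frame_offset = aligned_upper_bound (16, 8) = 16
       returned offset  = 16; frame_offset becomes 16 + 24 = 40  */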
419 
420 /* Accumulate DECL into STACK_VARS.  */
421 
422 static void
423 add_stack_var (tree decl)
424 {
425   struct stack_var *v;
426 
427   if (stack_vars_num >= stack_vars_alloc)
428     {
429       if (stack_vars_alloc)
430 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
431       else
432 	stack_vars_alloc = 32;
433       stack_vars
434 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
435     }
436   if (!decl_to_stack_part)
437     decl_to_stack_part = new hash_map<tree, size_t>;
438 
439   v = &stack_vars[stack_vars_num];
440   decl_to_stack_part->put (decl, stack_vars_num);
441 
442   v->decl = decl;
443   tree size = TREE_CODE (decl) == SSA_NAME
444     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445     : DECL_SIZE_UNIT (decl);
446   v->size = tree_to_poly_uint64 (size);
447   /* Ensure that all variables have size, so that &a != &b for any two
448      variables that are simultaneously live.  */
449   if (known_eq (v->size, 0U))
450     v->size = 1;
451   v->alignb = align_local_variable (decl);
452   /* An alignment of zero can mightily confuse us later.  */
453   gcc_assert (v->alignb != 0);
454 
455   /* All variables are initially in their own partition.  */
456   v->representative = stack_vars_num;
457   v->next = EOC;
458 
459   /* All variables initially conflict with no other.  */
460   v->conflicts = NULL;
461 
462   /* Ensure that this decl doesn't get put onto the list twice.  */
463   set_rtl (decl, pc_rtx);
464 
465   stack_vars_num++;
466 }
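
/* Illustrative note on the allocation policy above: stack_vars_alloc grows
   geometrically by a factor of 3/2 starting at 32 (32, 48, 72, 108, ...),
   so repeated add_stack_var calls reallocate in amortized constant time per
   addition.  The pc_rtx stored by set_rtl serves purely as a "no slot
   assigned yet" sentinel; expand_stack_vars later compares DECL_RTL against
   pc_rtx to skip decls that already received real RTL.  */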
467 
468 /* Make the decls associated with LUIDs X and Y conflict.  */
469 
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
472 {
473   struct stack_var *a = &stack_vars[x];
474   struct stack_var *b = &stack_vars[y];
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with LUIDs X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
502 
503 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
504    add_stack_var, enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
523    add_stack_var, record conflicts between it and all currently active other
524    partitions from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This overapproximates in cases where we, for instance, moved an
632      address-taken operation upward without also moving a dereference to it.
633      But it's conservatively correct, as a variable can never hold values
634      before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
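
/* A small sketch of what the conflict computation above enables, assuming a
   hypothetical function such as:

     void f (void)
     {
       { char a[256]; use (a); }   // clobber of 'a' ends its live range
       { char b[256]; use (b); }   // 'b' is first mentioned after that clobber
     }

   Since 'a' and 'b' are never simultaneously live, no conflict is recorded
   between their partitions, and partition_stack_vars may later place both in
   the same 256-byte stack slot.  */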
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID for sort stability, increasing.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
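
/* A rough illustration of the resulting order, assuming a target where a
   512-byte (4096-bit) request exceeds MAX_SUPPORTED_STACK_ALIGNMENT:

	decl  alignb  size   position
	A     512     64     first   ("large" alignment sorts before all others)
	B     16      128    second  (then by size, decreasing)
	C     16      32     third
	D     8       32     fourth  (equal size, so by alignment, decreasing)  */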
723 
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PT points to variables that are in a partition
728    together with other variables add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared; it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add, we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with a single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855    /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
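
/* As a rough illustration, assuming four non-"large" variables with sorted
   sizes 64, 32, 16 and 8, where only the 64- and 32-byte variables conflict
   with each other:

	start:      {64} {32} {16} {8}
	process 64: union with 16, then with 8  ->  {64,16,8} {32}
	process 32: 16 and 8 are no longer representatives, so nothing to do

   yielding two partitions instead of four separate stack slots.  */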
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects, placing J at OFFSET.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Set the alignment we actually gave this decl if it isn't an SSA name.
999          If it is, we generate stack slots only accidentally, so it isn't as
1000 	 important; we'll simply use the alignment that is already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
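
/* A worked example of the alignment derivation above, assuming
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and BITS_PER_UNIT == 8: for a decl
   placed at offset -48 from virtual_stack_vars_rtx, known_alignment (-48)
   is 16 bytes (48 == 16 * 3), so DECL_ALIGN becomes 128 bits unless that
   exceeds BASE_ALIGN, in which case BASE_ALIGN is used instead.  */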
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always end of some padding followed
1022      by start of the padding that needs Address Sanitizer protection.
1023      The vector is in reverse order; pairs with the highest offsets come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process them if
1052      no predicate is involved.  */
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      prev_offset = align_base (prev_offset,
1130 					MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 					!FRAME_GROWS_DOWNWARD);
1132 	      tree repr_decl = NULL_TREE;
1133 	      offset
1134 		= alloc_stack_frame_space (stack_vars[i].size
1135 					   + ASAN_RED_ZONE_SIZE,
1136 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 
1138 	      data->asan_vec.safe_push (prev_offset);
1139 	      /* Allocating a constant amount of space from a constant
1140 		 starting offset must give a constant result.  */
1141 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 					.to_constant ());
1143 	      /* Find the best representative of the partition.
1144 		 Prefer decls with a DECL_NAME, and better yet those that
1145 		 satisfy the asan_protect_stack_decl predicate.  */
1146 	      for (j = i; j != EOC; j = stack_vars[j].next)
1147 		if (asan_protect_stack_decl (stack_vars[j].decl)
1148 		    && DECL_NAME (stack_vars[j].decl))
1149 		  {
1150 		    repr_decl = stack_vars[j].decl;
1151 		    break;
1152 		  }
1153 		else if (repr_decl == NULL_TREE
1154 			 && DECL_P (stack_vars[j].decl)
1155 			 && DECL_NAME (stack_vars[j].decl))
1156 		  repr_decl = stack_vars[j].decl;
1157 	      if (repr_decl == NULL_TREE)
1158 		repr_decl = stack_vars[i].decl;
1159 	      data->asan_decl_vec.safe_push (repr_decl);
1160 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1161 	      if (data->asan_base == NULL)
1162 		data->asan_base = gen_reg_rtx (Pmode);
1163 	      base = data->asan_base;
1164 
1165 	      if (!STRICT_ALIGNMENT)
1166 		base_align = crtl->max_used_stack_slot_alignment;
1167 	      else
1168 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1169 				  GET_MODE_ALIGNMENT (SImode)
1170 				  << ASAN_SHADOW_SHIFT);
1171 	    }
1172 	  else
1173 	    {
1174 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1175 	      base_align = crtl->max_used_stack_slot_alignment;
1176 	    }
1177 	}
1178       else
1179 	{
1180 	  /* Large alignment is only processed in the last pass.  */
1181 	  if (pred)
1182 	    continue;
1183 
1184 	  /* If there were any variables requiring "large" alignment, allocate
1185 	     space.  */
1186 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1187 	    {
1188 	      poly_int64 loffset;
1189 	      rtx large_allocsize;
1190 
1191 	      large_allocsize = gen_int_mode (large_size, Pmode);
1192 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1193 	      loffset = alloc_stack_frame_space
1194 		(rtx_to_poly_int64 (large_allocsize),
1195 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1196 	      large_base = get_dynamic_stack_base (loffset, large_align);
1197 	      large_allocation_done = true;
1198 	    }
1199 	  gcc_assert (large_base != NULL);
1200 
1201 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1202 	  offset = large_alloc;
1203 	  large_alloc += stack_vars[i].size;
1204 
1205 	  base = large_base;
1206 	  base_align = large_align;
1207 	}
1208 
1209       /* Create rtl for each variable based on their location within the
1210 	 partition.  */
1211       for (j = i; j != EOC; j = stack_vars[j].next)
1212 	{
1213 	  expand_one_stack_var_at (stack_vars[j].decl,
1214 				   base, base_align,
1215 				   offset);
1216 	}
1217     }
1218 
1219   gcc_assert (known_eq (large_alloc, large_size));
1220 }
1221 
1222 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1223 static poly_uint64
1224 account_stack_vars (void)
1225 {
1226   size_t si, j, i, n = stack_vars_num;
1227   poly_uint64 size = 0;
1228 
1229   for (si = 0; si < n; ++si)
1230     {
1231       i = stack_vars_sorted[si];
1232 
1233       /* Skip variables that aren't partition representatives, for now.  */
1234       if (stack_vars[i].representative != i)
1235 	continue;
1236 
1237       size += stack_vars[i].size;
1238       for (j = i; j != EOC; j = stack_vars[j].next)
1239 	set_rtl (stack_vars[j].decl, NULL);
1240     }
1241   return size;
1242 }
1243 
1244 /* Record the RTL assignment X for the default def of PARM.  */
1245 
1246 extern void
1247 set_parm_rtl (tree parm, rtx x)
1248 {
1249   gcc_assert (TREE_CODE (parm) == PARM_DECL
1250 	      || TREE_CODE (parm) == RESULT_DECL);
1251 
1252   if (x && !MEM_P (x))
1253     {
1254       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1255 					      TYPE_MODE (TREE_TYPE (parm)),
1256 					      TYPE_ALIGN (TREE_TYPE (parm)));
1257 
1258       /* If the variable alignment is very large we'll dynamically
1259 	 allocate it, which means that in-frame portion is just a
1260 	 pointer.  ??? We've got a pseudo for sure here, do we
1261 	 actually dynamically allocate its spilling area if needed?
1262 	 ??? Isn't it a problem when POINTER_SIZE also exceeds
1263 	 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
1264       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1265 	align = POINTER_SIZE;
1266 
1267       record_alignment_for_reg_var (align);
1268     }
1269 
1270   tree ssa = ssa_default_def (cfun, parm);
1271   if (!ssa)
1272     return set_rtl (parm, x);
1273 
1274   int part = var_to_partition (SA.map, ssa);
1275   gcc_assert (part != NO_PARTITION);
1276 
1277   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1278   gcc_assert (changed);
1279 
1280   set_rtl (ssa, x);
1281   gcc_assert (DECL_RTL (parm) == x);
1282 }
1283 
1284 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1285    to a variable to be allocated in the stack frame.  */
1286 
1287 static void
1288 expand_one_stack_var_1 (tree var)
1289 {
1290   poly_uint64 size;
1291   poly_int64 offset;
1292   unsigned byte_align;
1293 
1294   if (TREE_CODE (var) == SSA_NAME)
1295     {
1296       tree type = TREE_TYPE (var);
1297       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1298       byte_align = TYPE_ALIGN_UNIT (type);
1299     }
1300   else
1301     {
1302       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1303       byte_align = align_local_variable (var);
1304     }
1305 
1306   /* We handle highly aligned variables in expand_stack_vars.  */
1307   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1308 
1309   offset = alloc_stack_frame_space (size, byte_align);
1310 
1311   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1312 			   crtl->max_used_stack_slot_alignment, offset);
1313 }
1314 
1315 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1316    already assigned some MEM.  */
1317 
1318 static void
1319 expand_one_stack_var (tree var)
1320 {
1321   if (TREE_CODE (var) == SSA_NAME)
1322     {
1323       int part = var_to_partition (SA.map, var);
1324       if (part != NO_PARTITION)
1325 	{
1326 	  rtx x = SA.partition_to_pseudo[part];
1327 	  gcc_assert (x);
1328 	  gcc_assert (MEM_P (x));
1329 	  return;
1330 	}
1331     }
1332 
1333   return expand_one_stack_var_1 (var);
1334 }
1335 
1336 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1337    that will reside in a hard register.  */
1338 
1339 static void
1340 expand_one_hard_reg_var (tree var)
1341 {
1342   rest_of_decl_compilation (var, 0, 0);
1343 }
1344 
1345 /* Record the alignment requirements of some variable assigned to a
1346    pseudo.  */
1347 
1348 static void
1349 record_alignment_for_reg_var (unsigned int align)
1350 {
1351   if (SUPPORTS_STACK_ALIGNMENT
1352       && crtl->stack_alignment_estimated < align)
1353     {
1354       /* stack_alignment_estimated shouldn't change after the stack
1355          realignment decision has been made.  */
1356       gcc_assert (!crtl->stack_realign_processed);
1357       crtl->stack_alignment_estimated = align;
1358     }
1359 
1360   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1361      So here we only make sure stack_alignment_needed >= align.  */
1362   if (crtl->stack_alignment_needed < align)
1363     crtl->stack_alignment_needed = align;
1364   if (crtl->max_used_stack_slot_alignment < align)
1365     crtl->max_used_stack_slot_alignment = align;
1366 }
1367 
1368 /* Create RTL for an SSA partition.  */
1369 
1370 static void
1371 expand_one_ssa_partition (tree var)
1372 {
1373   int part = var_to_partition (SA.map, var);
1374   gcc_assert (part != NO_PARTITION);
1375 
1376   if (SA.partition_to_pseudo[part])
1377     return;
1378 
1379   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1380 					  TYPE_MODE (TREE_TYPE (var)),
1381 					  TYPE_ALIGN (TREE_TYPE (var)));
1382 
1383   /* If the variable alignment is very large we'll dynamically allocate
1384      it, which means that in-frame portion is just a pointer.  */
1385   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1386     align = POINTER_SIZE;
1387 
1388   record_alignment_for_reg_var (align);
1389 
1390   if (!use_register_for_decl (var))
1391     {
1392       if (defer_stack_allocation (var, true))
1393 	add_stack_var (var);
1394       else
1395 	expand_one_stack_var_1 (var);
1396       return;
1397     }
1398 
1399   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1400   rtx x = gen_reg_rtx (reg_mode);
1401 
1402   set_rtl (var, x);
1403 
1404   /* For a promoted variable, X will not be used directly but wrapped in a
1405      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1406      will assume that its upper bits can be inferred from its lower bits.
1407      Therefore, if X isn't initialized on every path from the entry, then
1408      we must do it manually in order to fulfill the above assumption.  */
1409   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1410       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1411     emit_move_insn (x, CONST0_RTX (reg_mode));
1412 }
1413 
1414 /* Record the association between the RTL generated for partition PART
1415    and the underlying variable of the SSA_NAME VAR.  */
1416 
1417 static void
1418 adjust_one_expanded_partition_var (tree var)
1419 {
1420   if (!var)
1421     return;
1422 
1423   tree decl = SSA_NAME_VAR (var);
1424 
1425   int part = var_to_partition (SA.map, var);
1426   if (part == NO_PARTITION)
1427     return;
1428 
1429   rtx x = SA.partition_to_pseudo[part];
1430 
1431   gcc_assert (x);
1432 
1433   set_rtl (var, x);
1434 
1435   if (!REG_P (x))
1436     return;
1437 
1438   /* Note if the object is a user variable.  */
1439   if (decl && !DECL_ARTIFICIAL (decl))
1440     mark_user_reg (x);
1441 
1442   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1443     mark_reg_pointer (x, get_pointer_alignment (var));
1444 }
1445 
1446 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1447    that will reside in a pseudo register.  */
1448 
1449 static void
1450 expand_one_register_var (tree var)
1451 {
1452   if (TREE_CODE (var) == SSA_NAME)
1453     {
1454       int part = var_to_partition (SA.map, var);
1455       if (part != NO_PARTITION)
1456 	{
1457 	  rtx x = SA.partition_to_pseudo[part];
1458 	  gcc_assert (x);
1459 	  gcc_assert (REG_P (x));
1460 	  return;
1461 	}
1462       gcc_unreachable ();
1463     }
1464 
1465   tree decl = var;
1466   tree type = TREE_TYPE (decl);
1467   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1468   rtx x = gen_reg_rtx (reg_mode);
1469 
1470   set_rtl (var, x);
1471 
1472   /* Note if the object is a user variable.  */
1473   if (!DECL_ARTIFICIAL (decl))
1474     mark_user_reg (x);
1475 
1476   if (POINTER_TYPE_P (type))
1477     mark_reg_pointer (x, get_pointer_alignment (var));
1478 }
1479 
1480 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1481    has some associated error, e.g. its type is error-mark.  We just need
1482    to pick something that won't crash the rest of the compiler.  */
1483 
1484 static void
1485 expand_one_error_var (tree var)
1486 {
1487   machine_mode mode = DECL_MODE (var);
1488   rtx x;
1489 
1490   if (mode == BLKmode)
1491     x = gen_rtx_MEM (BLKmode, const0_rtx);
1492   else if (mode == VOIDmode)
1493     x = const0_rtx;
1494   else
1495     x = gen_reg_rtx (mode);
1496 
1497   SET_DECL_RTL (var, x);
1498 }
1499 
1500 /* A subroutine of expand_one_var.  VAR is a variable that will be
1501    allocated to the local stack frame.  Return true if we wish to
1502    add VAR to STACK_VARS so that it will be coalesced with other
1503    variables.  Return false to allocate VAR immediately.
1504 
1505    This function is used to reduce the number of variables considered
1506    for coalescing, which reduces the size of the quadratic problem.  */
1507 
1508 static bool
1509 defer_stack_allocation (tree var, bool toplevel)
1510 {
1511   tree size_unit = TREE_CODE (var) == SSA_NAME
1512     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1513     : DECL_SIZE_UNIT (var);
1514   poly_uint64 size;
1515 
1516   /* Whether the variable is small enough for immediate allocation not to be
1517      a problem with regard to the frame size.  */
1518   bool smallish
1519     = (poly_int_tree_p (size_unit, &size)
1520        && (estimated_poly_value (size)
1521 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1522 
1523   /* If stack protection is enabled, *all* stack variables must be deferred,
1524      so that we can re-order the strings to the top of the frame.
1525      Similarly for Address Sanitizer.  */
1526   if (flag_stack_protect || asan_sanitize_stack_p ())
1527     return true;
1528 
1529   unsigned int align = TREE_CODE (var) == SSA_NAME
1530     ? TYPE_ALIGN (TREE_TYPE (var))
1531     : DECL_ALIGN (var);
1532 
1533   /* We handle "large" alignment via dynamic allocation.  We want to handle
1534      this extra complication in only one place, so defer them.  */
1535   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1536     return true;
1537 
1538   bool ignored = TREE_CODE (var) == SSA_NAME
1539     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1540     : DECL_IGNORED_P (var);
1541 
1542   /* When optimization is enabled, DECL_IGNORED_P variables that were originally
1543      block-scoped might be detached from their block and appear at toplevel when
1544      we reach here.  We want to coalesce them with variables from other blocks
1545      when the immediate contribution to the frame size would be noticeable.  */
1546   if (toplevel && optimize > 0 && ignored && !smallish)
1547     return true;
1548 
1549   /* Variables declared in the outermost scope automatically conflict
1550      with every other variable.  The only reason to want to defer them
1551      at all is that, after sorting, we can more efficiently pack
1552      small variables in the stack frame.  Continue to defer at -O2.  */
1553   if (toplevel && optimize < 2)
1554     return false;
1555 
1556   /* Without optimization, *most* variables are allocated from the
1557      stack, which makes the quadratic problem large exactly when we
1558      want compilation to proceed as quickly as possible.  On the
1559      other hand, we don't want the function's stack frame size to
1560      get completely out of hand.  So we avoid adding scalars and
1561      "small" aggregates to the list at all.  */
1562   if (optimize == 0 && smallish)
1563     return false;
1564 
1565   return true;
1566 }
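
/* A sketch of the decision order above: stack-protector or ASan stack
   instrumentation defers every variable; "large" over-aligned variables are
   always deferred; with optimization enabled, ignored non-small variables
   appearing at toplevel are deferred; remaining toplevel variables are
   allocated immediately below -O2; small variables are allocated immediately
   at -O0; everything else is deferred so it can take part in slot sharing.  */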
1567 
1568 /* A subroutine of expand_used_vars.  Expand one variable according to
1569    its flavor.  Variables to be placed on the stack are not actually
1570    expanded yet, merely recorded.
1571    When REALLY_EXPAND is false, only add stack values to be allocated.
1572    Return the stack usage this variable is supposed to take.
1573 */
1574 
1575 static poly_uint64
1576 expand_one_var (tree var, bool toplevel, bool really_expand)
1577 {
1578   unsigned int align = BITS_PER_UNIT;
1579   tree origvar = var;
1580 
1581   var = SSAVAR (var);
1582 
1583   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1584     {
1585       if (is_global_var (var))
1586 	return 0;
1587 
1588       /* Because we don't know if VAR will be in register or on stack,
1589 	 we conservatively assume it will be on stack even if VAR is
1590 	 eventually put into register after RA pass.  For non-automatic
1591 	 variables, which won't be on stack, we collect alignment of
1592 	 type and ignore user specified alignment.  Similarly for
1593 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1594       if (TREE_STATIC (var)
1595 	  || DECL_EXTERNAL (var)
1596 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1597 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1598 				   TYPE_MODE (TREE_TYPE (var)),
1599 				   TYPE_ALIGN (TREE_TYPE (var)));
1600       else if (DECL_HAS_VALUE_EXPR_P (var)
1601 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1602 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1603 	   or variables which were assigned a stack slot already by
1604 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1605 	   changed to reflect the offset chosen for it.  */
1606 	align = crtl->stack_alignment_estimated;
1607       else
1608 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1609 
1610       /* If the variable alignment is very large we'll dynamically allocate
1611 	 it, which means that in-frame portion is just a pointer.  */
1612       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1613 	align = POINTER_SIZE;
1614     }
1615 
1616   record_alignment_for_reg_var (align);
1617 
1618   poly_uint64 size;
1619   if (TREE_CODE (origvar) == SSA_NAME)
1620     {
1621       gcc_assert (!VAR_P (var)
1622 		  || (!DECL_EXTERNAL (var)
1623 		      && !DECL_HAS_VALUE_EXPR_P (var)
1624 		      && !TREE_STATIC (var)
1625 		      && TREE_TYPE (var) != error_mark_node
1626 		      && !DECL_HARD_REGISTER (var)
1627 		      && really_expand));
1628     }
1629   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1630     ;
1631   else if (DECL_EXTERNAL (var))
1632     ;
1633   else if (DECL_HAS_VALUE_EXPR_P (var))
1634     ;
1635   else if (TREE_STATIC (var))
1636     ;
1637   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1638     ;
1639   else if (TREE_TYPE (var) == error_mark_node)
1640     {
1641       if (really_expand)
1642         expand_one_error_var (var);
1643     }
1644   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1645     {
1646       if (really_expand)
1647 	{
1648 	  expand_one_hard_reg_var (var);
1649 	  if (!DECL_HARD_REGISTER (var))
1650 	    /* Invalid register specification.  */
1651 	    expand_one_error_var (var);
1652 	}
1653     }
1654   else if (use_register_for_decl (var))
1655     {
1656       if (really_expand)
1657         expand_one_register_var (origvar);
1658     }
1659   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1660 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1661     {
1662       /* Reject variables which cover more than half of the address-space.  */
1663       if (really_expand)
1664 	{
1665 	  error ("size of variable %q+D is too large", var);
1666 	  expand_one_error_var (var);
1667 	}
1668     }
1669   else if (defer_stack_allocation (var, toplevel))
1670     add_stack_var (origvar);
1671   else
1672     {
1673       if (really_expand)
1674         {
1675           if (lookup_attribute ("naked",
1676                                 DECL_ATTRIBUTES (current_function_decl)))
1677             error ("cannot allocate stack for variable %q+D, naked function",
1678                    var);
1679 
1680           expand_one_stack_var (origvar);
1681         }
1682       return size;
1683     }
1684   return 0;
1685 }
1686 
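/* Editorial illustration (not part of the original source): a rough sketch
   of which path expand_one_var takes for a few kinds of declarations.  The
   exact outcome depends on the target, the optimization level and
   use_register_for_decl, so treat this only as an orientation aid.

     int global_counter;     // is_global_var: nothing to do, returns 0
     void
     example (void)
     {
       static int calls;     // TREE_STATIC: no frame space accounted here
       extern int limit;     // DECL_EXTERNAL: skipped here
       int i;                // usually use_register_for_decl: gets a pseudo
       char buf[64];         // not register-suitable: stack slot, its size
     }                       //   is either deferred via add_stack_var or
                             //   allocated now via expand_one_stack_var
   End of editorial illustration.  */
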
1687 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1688    expanding variables.  Those variables that can be put into registers
1689    are allocated pseudos; those that can't are put on the stack.
1690 
1691    TOPLEVEL is true if this is the outermost BLOCK.  */
1692 
1693 static void
1694 expand_used_vars_for_block (tree block, bool toplevel)
1695 {
1696   tree t;
1697 
1698   /* Expand all variables at this level.  */
1699   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1700     if (TREE_USED (t)
1701         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1702 	    || !DECL_NONSHAREABLE (t)))
1703       expand_one_var (t, toplevel, true);
1704 
1705   /* Expand all variables at containing levels.  */
1706   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1707     expand_used_vars_for_block (t, false);
1708 }
1709 
1710 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1711    and clear TREE_USED on all local variables.  */
1712 
1713 static void
1714 clear_tree_used (tree block)
1715 {
1716   tree t;
1717 
1718   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1719     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1720     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1721 	|| !DECL_NONSHAREABLE (t))
1722       TREE_USED (t) = 0;
1723 
1724   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1725     clear_tree_used (t);
1726 }
1727 
1728 enum {
1729   SPCT_FLAG_DEFAULT = 1,
1730   SPCT_FLAG_ALL = 2,
1731   SPCT_FLAG_STRONG = 3,
1732   SPCT_FLAG_EXPLICIT = 4
1733 };
1734 
1735 /* Examine TYPE and determine a bit mask of the following features.  */
1736 
1737 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1738 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1739 #define SPCT_HAS_ARRAY			4
1740 #define SPCT_HAS_AGGREGATE		8
1741 
1742 static unsigned int
1743 stack_protect_classify_type (tree type)
1744 {
1745   unsigned int ret = 0;
1746   tree t;
1747 
1748   switch (TREE_CODE (type))
1749     {
1750     case ARRAY_TYPE:
1751       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1752       if (t == char_type_node
1753 	  || t == signed_char_type_node
1754 	  || t == unsigned_char_type_node)
1755 	{
1756 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1757 	  unsigned HOST_WIDE_INT len;
1758 
1759 	  if (!TYPE_SIZE_UNIT (type)
1760 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1761 	    len = max;
1762 	  else
1763 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1764 
1765 	  if (len < max)
1766 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1767 	  else
1768 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1769 	}
1770       else
1771 	ret = SPCT_HAS_ARRAY;
1772       break;
1773 
1774     case UNION_TYPE:
1775     case QUAL_UNION_TYPE:
1776     case RECORD_TYPE:
1777       ret = SPCT_HAS_AGGREGATE;
1778       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1779 	if (TREE_CODE (t) == FIELD_DECL)
1780 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1781       break;
1782 
1783     default:
1784       break;
1785     }
1786 
1787   return ret;
1788 }
1789 
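/* Editorial illustration (not part of the original source): assuming the
   default --param ssp-buffer-size=8, stack_protect_classify_type would
   classify the following C types roughly as shown.

     char small[4];      // SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[32];       // SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int  ints[4];       // SPCT_HAS_ARRAY
     struct s {          // SPCT_HAS_AGGREGATE, plus the bits of its fields:
       char name[32];    //   ... | SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
       int  id;
     };
   End of editorial illustration.  */
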
1790 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1791    part of the local stack frame.  Remember if we ever return nonzero for
1792    any variable in this function.  The return value is the phase number in
1793    which the variable should be allocated.  */
1794 
1795 static int
1796 stack_protect_decl_phase (tree decl)
1797 {
1798   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1799   int ret = 0;
1800 
1801   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1802     has_short_buffer = true;
1803 
1804   if (flag_stack_protect == SPCT_FLAG_ALL
1805       || flag_stack_protect == SPCT_FLAG_STRONG
1806       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1807 	  && lookup_attribute ("stack_protect",
1808 			       DECL_ATTRIBUTES (current_function_decl))))
1809     {
1810       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1811 	  && !(bits & SPCT_HAS_AGGREGATE))
1812 	ret = 1;
1813       else if (bits & SPCT_HAS_ARRAY)
1814 	ret = 2;
1815     }
1816   else
1817     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1818 
1819   if (ret)
1820     has_protected_decls = true;
1821 
1822   return ret;
1823 }
1824 
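/* Editorial illustration (not part of the original source): a sketch of the
   phase numbers stack_protect_decl_phase would assign, assuming the default
   --param ssp-buffer-size=8.

     With -fstack-protector-strong (or -fstack-protector-all):
       char buf[64];              // phase 1: character array, no aggregate
       char tiny[4];              // phase 1: small char arrays count too
       int  v[16];                // phase 2: an array, but not of chars
       struct { char n[64]; } s;  // phase 2: char array inside an aggregate
       int  x;                    // phase 0: not segregated

     With plain -fstack-protector (SPCT_FLAG_DEFAULT), only declarations
     whose type contains a large character array (length >= ssp-buffer-size)
     get phase 1; everything else stays in phase 0.
   End of editorial illustration.  */
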
1825 /* Two helper routines that check for phase 1 and phase 2.  These are used
1826    as callbacks for expand_stack_vars.  */
1827 
1828 static bool
1829 stack_protect_decl_phase_1 (size_t i)
1830 {
1831   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1832 }
1833 
1834 static bool
1835 stack_protect_decl_phase_2 (size_t i)
1836 {
1837   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1838 }
1839 
1840 /* A helper function that checks for the asan phase (with stack protector
1841    it is phase 3).  This is used as a callback for expand_stack_vars.
1842    Returns true if any of the vars in the partition need to be protected.  */
1843 
1844 static bool
1845 asan_decl_phase_3 (size_t i)
1846 {
1847   while (i != EOC)
1848     {
1849       if (asan_protect_stack_decl (stack_vars[i].decl))
1850 	return true;
1851       i = stack_vars[i].next;
1852     }
1853   return false;
1854 }
1855 
1856 /* Ensure that variables in different stack protection phases conflict
1857    so that they are not merged and share the same stack slot.  */
1858 
1859 static void
1860 add_stack_protection_conflicts (void)
1861 {
1862   size_t i, j, n = stack_vars_num;
1863   unsigned char *phase;
1864 
1865   phase = XNEWVEC (unsigned char, n);
1866   for (i = 0; i < n; ++i)
1867     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1868 
1869   for (i = 0; i < n; ++i)
1870     {
1871       unsigned char ph_i = phase[i];
1872       for (j = i + 1; j < n; ++j)
1873 	if (ph_i != phase[j])
1874 	  add_stack_var_conflict (i, j);
1875     }
1876 
1877   XDELETEVEC (phase);
1878 }
1879 
1880 /* Create a decl for the guard at the top of the stack frame.  */
1881 
1882 static void
1883 create_stack_guard (void)
1884 {
1885   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1886 			   VAR_DECL, NULL, ptr_type_node);
1887   TREE_THIS_VOLATILE (guard) = 1;
1888   TREE_USED (guard) = 1;
1889   expand_one_stack_var (guard);
1890   crtl->stack_protect_guard = guard;
1891 }
1892 
1893 /* Prepare for expanding variables.  */
1894 static void
1895 init_vars_expansion (void)
1896 {
1897   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1898   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1899 
1900   /* A map from decl to stack partition.  */
1901   decl_to_stack_part = new hash_map<tree, size_t>;
1902 
1903   /* Initialize local stack smashing state.  */
1904   has_protected_decls = false;
1905   has_short_buffer = false;
1906 }
1907 
1908 /* Free up stack variable graph data.  */
1909 static void
1910 fini_vars_expansion (void)
1911 {
1912   bitmap_obstack_release (&stack_var_bitmap_obstack);
1913   if (stack_vars)
1914     XDELETEVEC (stack_vars);
1915   if (stack_vars_sorted)
1916     XDELETEVEC (stack_vars_sorted);
1917   stack_vars = NULL;
1918   stack_vars_sorted = NULL;
1919   stack_vars_alloc = stack_vars_num = 0;
1920   delete decl_to_stack_part;
1921   decl_to_stack_part = NULL;
1922 }
1923 
1924 /* Make a fair guess for the size of the stack frame of the function
1925    in NODE.  This doesn't have to be exact, the result is only used in
1926    the inline heuristics.  So we don't want to run the full stack var
1927    packing algorithm (which is quadratic in the number of stack vars).
1928    Instead, we calculate the total size of all stack vars.  This turns
1929    out to be a pretty fair estimate -- packing of stack vars doesn't
1930    happen very often.  */
1931 
1932 HOST_WIDE_INT
1933 estimated_stack_frame_size (struct cgraph_node *node)
1934 {
1935   poly_int64 size = 0;
1936   size_t i;
1937   tree var;
1938   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1939 
1940   push_cfun (fn);
1941 
1942   init_vars_expansion ();
1943 
1944   FOR_EACH_LOCAL_DECL (fn, i, var)
1945     if (auto_var_in_fn_p (var, fn->decl))
1946       size += expand_one_var (var, true, false);
1947 
1948   if (stack_vars_num > 0)
1949     {
1950       /* Fake sorting the stack vars for account_stack_vars ().  */
1951       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1952       for (i = 0; i < stack_vars_num; ++i)
1953 	stack_vars_sorted[i] = i;
1954       size += account_stack_vars ();
1955     }
1956 
1957   fini_vars_expansion ();
1958   pop_cfun ();
1959   return estimated_poly_value (size);
1960 }
1961 
1962 /* Helper routine to check if a record or union contains an array field. */
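/* Editorial illustration (not part of the original source): the estimate is
   deliberately simple.  For a function such as

     void
     f (void)
     {
       char a[100];
       int b[25];
     }

   the estimate is roughly sizeof a + sizeof b (about 200 bytes plus
   alignment padding), because no slot sharing between the two arrays is
   attempted here; the frame laid out later may end up smaller.
   End of editorial illustration.  */
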
1963 
1964 static int
1965 record_or_union_type_has_array_p (const_tree tree_type)
1966 {
1967   tree fields = TYPE_FIELDS (tree_type);
1968   tree f;
1969 
1970   for (f = fields; f; f = DECL_CHAIN (f))
1971     if (TREE_CODE (f) == FIELD_DECL)
1972       {
1973 	tree field_type = TREE_TYPE (f);
1974 	if (RECORD_OR_UNION_TYPE_P (field_type)
1975 	    && record_or_union_type_has_array_p (field_type))
1976 	  return 1;
1977 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1978 	  return 1;
1979       }
1980   return 0;
1981 }
1982 
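/* Editorial illustration (not part of the original source): the walk above
   recurses through nested records, e.g.

     struct inner { int values[8]; };
     struct outer { struct inner i; long tag; };   // has_array_p: 1
     struct plain { long a; double b; };           // has_array_p: 0

   End of editorial illustration.  */
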
1983 /* Check if the current function has local referenced variables that
1984    have their addresses taken, contain an array, or are arrays.  */
1985 
1986 static bool
1987 stack_protect_decl_p ()
1988 {
1989   unsigned i;
1990   tree var;
1991 
1992   FOR_EACH_LOCAL_DECL (cfun, i, var)
1993     if (!is_global_var (var))
1994       {
1995 	tree var_type = TREE_TYPE (var);
1996 	if (VAR_P (var)
1997 	    && (TREE_CODE (var_type) == ARRAY_TYPE
1998 		|| TREE_ADDRESSABLE (var)
1999 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2000 		    && record_or_union_type_has_array_p (var_type))))
2001 	  return true;
2002       }
2003   return false;
2004 }
2005 
2006 /* Check if the current function has calls that use a return slot.  */
2007 
2008 static bool
2009 stack_protect_return_slot_p ()
2010 {
2011   basic_block bb;
2012 
2013   FOR_ALL_BB_FN (bb, cfun)
2014     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 	 !gsi_end_p (gsi); gsi_next (&gsi))
2016       {
2017 	gimple *stmt = gsi_stmt (gsi);
2018 	/* This assumes that calls to internal-only functions never
2019 	   use a return slot.  */
2020 	if (is_gimple_call (stmt)
2021 	    && !gimple_call_internal_p (stmt)
2022 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 				  gimple_call_fndecl (stmt)))
2024 	  return true;
2025       }
2026   return false;
2027 }
2028 
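/* Editorial illustration (not part of the original source): under
   -fstack-protector-strong the two predicates above make functions like the
   following candidates for a stack guard, even without a local char buffer.

     struct big { long v[32]; };
     struct big make_big (void);

     long
     g (void)
     {
       int n = 42;
       int *p = &n;                  // stack_protect_decl_p: address taken
       struct big b = make_big ();   // stack_protect_return_slot_p: the call
       return *p + b.v[0];           //   returns its aggregate in memory
     }

   Whether the aggregate really uses a return slot depends on the target ABI.
   End of editorial illustration.  */
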
2029 /* Expand all variables used in the function.  */
2030 
2031 static rtx_insn *
2032 expand_used_vars (void)
2033 {
2034   tree var, outer_block = DECL_INITIAL (current_function_decl);
2035   auto_vec<tree> maybe_local_decls;
2036   rtx_insn *var_end_seq = NULL;
2037   unsigned i;
2038   unsigned len;
2039   bool gen_stack_protect_signal = false;
2040 
2041   /* Compute the phase of the stack frame for this function.  */
2042   {
2043     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2044     int off = targetm.starting_frame_offset () % align;
2045     frame_phase = off ? align - off : 0;
2046   }
2047 
2048   /* Set TREE_USED on all variables in the local_decls.  */
2049   FOR_EACH_LOCAL_DECL (cfun, i, var)
2050     TREE_USED (var) = 1;
2051   /* Clear TREE_USED on all variables associated with a block scope.  */
2052   clear_tree_used (DECL_INITIAL (current_function_decl));
2053 
2054   init_vars_expansion ();
2055 
2056   if (targetm.use_pseudo_pic_reg ())
2057     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2058 
2059   for (i = 0; i < SA.map->num_partitions; i++)
2060     {
2061       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 	continue;
2063 
2064       tree var = partition_to_var (SA.map, i);
2065 
2066       gcc_assert (!virtual_operand_p (var));
2067 
2068       expand_one_ssa_partition (var);
2069     }
2070 
2071   if (flag_stack_protect == SPCT_FLAG_STRONG)
2072     gen_stack_protect_signal
2073       = stack_protect_decl_p () || stack_protect_return_slot_p ();
2074 
2075   /* At this point all variables on the local_decls with TREE_USED
2076      set are not associated with any block scope.  Lay them out.  */
2077 
2078   len = vec_safe_length (cfun->local_decls);
2079   FOR_EACH_LOCAL_DECL (cfun, i, var)
2080     {
2081       bool expand_now = false;
2082 
2083       /* Expanded above already.  */
2084       if (is_gimple_reg (var))
2085 	{
2086 	  TREE_USED (var) = 0;
2087 	  goto next;
2088 	}
2089       /* We didn't set a block for static or extern because it's hard
2090 	 to tell the difference between a global variable (re)declared
2091 	 in a local scope, and one that's really declared there to
2092 	 begin with.  And it doesn't really matter much, since we're
2093 	 not giving them stack space.  Expand them now.  */
2094       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2095 	expand_now = true;
2096 
2097       /* Expand variables not associated with any block now.  Those created by
2098 	 the optimizers could be live anywhere in the function.  Those that
2099 	 could possibly have been scoped originally and detached from their
2100 	 block will have their allocation deferred so we coalesce them with
2101 	 others when optimization is enabled.  */
2102       else if (TREE_USED (var))
2103 	expand_now = true;
2104 
2105       /* Finally, mark all variables on the list as used.  We'll use
2106 	 this in a moment when we expand those associated with scopes.  */
2107       TREE_USED (var) = 1;
2108 
2109       if (expand_now)
2110 	expand_one_var (var, true, true);
2111 
2112     next:
2113       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2114 	{
2115 	  rtx rtl = DECL_RTL_IF_SET (var);
2116 
2117 	  /* Keep artificial non-ignored vars in cfun->local_decls
2118 	     chain until instantiate_decls.  */
2119 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2120 	    add_local_decl (cfun, var);
2121 	  else if (rtl == NULL_RTX)
2122 	    /* If rtl isn't set yet, which can happen e.g. with
2123 	       -fstack-protector, retry before returning from this
2124 	       function.  */
2125 	    maybe_local_decls.safe_push (var);
2126 	}
2127     }
2128 
2129   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2130 
2131      +-----------------+-----------------+
2132      | ...processed... | ...duplicates...|
2133      +-----------------+-----------------+
2134                        ^
2135 		       +-- LEN points here.
2136 
2137      We just want the duplicates, as those are the artificial
2138      non-ignored vars that we want to keep until instantiate_decls.
2139      Move them down and truncate the array.  */
2140   if (!vec_safe_is_empty (cfun->local_decls))
2141     cfun->local_decls->block_remove (0, len);
2142 
2143   /* At this point, all variables within the block tree with TREE_USED
2144      set are actually used by the optimized function.  Lay them out.  */
2145   expand_used_vars_for_block (outer_block, true);
2146 
2147   if (stack_vars_num > 0)
2148     {
2149       add_scope_conflicts ();
2150 
2151       /* If stack protection is enabled, we don't share space between
2152 	 vulnerable data and non-vulnerable data.  */
2153       if (flag_stack_protect != 0
2154 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2155 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2156 		  && lookup_attribute ("stack_protect",
2157 				       DECL_ATTRIBUTES (current_function_decl)))))
2158 	add_stack_protection_conflicts ();
2159 
2160       /* Now that we have collected all stack variables, and have computed a
2161 	 minimal interference graph, attempt to save some stack space.  */
2162       partition_stack_vars ();
2163       if (dump_file)
2164 	dump_stack_var_partition ();
2165     }
2166 
2167   switch (flag_stack_protect)
2168     {
2169     case SPCT_FLAG_ALL:
2170       create_stack_guard ();
2171       break;
2172 
2173     case SPCT_FLAG_STRONG:
2174       if (gen_stack_protect_signal
2175 	  || cfun->calls_alloca || has_protected_decls
2176 	  || lookup_attribute ("stack_protect",
2177 			       DECL_ATTRIBUTES (current_function_decl)))
2178 	create_stack_guard ();
2179       break;
2180 
2181     case SPCT_FLAG_DEFAULT:
2182       if (cfun->calls_alloca || has_protected_decls
2183 	  || lookup_attribute ("stack_protect",
2184 			       DECL_ATTRIBUTES (current_function_decl)))
2185 	create_stack_guard ();
2186       break;
2187 
2188     case SPCT_FLAG_EXPLICIT:
2189       if (lookup_attribute ("stack_protect",
2190 			    DECL_ATTRIBUTES (current_function_decl)))
2191 	create_stack_guard ();
2192       break;
2193     default:
2194       ;
2195     }
2196 
2197   /* Assign rtl to each variable based on these partitions.  */
2198   if (stack_vars_num > 0)
2199     {
2200       struct stack_vars_data data;
2201 
2202       data.asan_base = NULL_RTX;
2203       data.asan_alignb = 0;
2204 
2205       /* Reorder decls to be protected by iterating over the variables
2206 	 array multiple times, and allocating out of each phase in turn.  */
2207       /* ??? We could probably integrate this into the qsort we did
2208 	 earlier, such that we naturally see these variables first,
2209 	 and thus naturally allocate things in the right order.  */
2210       if (has_protected_decls)
2211 	{
2212 	  /* Phase 1 contains only character arrays.  */
2213 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2214 
2215 	  /* Phase 2 contains other kinds of arrays.  */
2216 	  if (flag_stack_protect == SPCT_FLAG_ALL
2217 	      || flag_stack_protect == SPCT_FLAG_STRONG
2218 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2219 		  && lookup_attribute ("stack_protect",
2220 				       DECL_ATTRIBUTES (current_function_decl))))
2221 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2222 	}
2223 
2224       if (asan_sanitize_stack_p ())
2225 	/* Phase 3, any partitions that need asan protection
2226 	   in addition to phase 1 and 2.  */
2227 	expand_stack_vars (asan_decl_phase_3, &data);
2228 
2229       /* ASAN description strings don't yet have a syntax for expressing
2230 	 polynomial offsets.  */
2231       HOST_WIDE_INT prev_offset;
2232       if (!data.asan_vec.is_empty ()
2233 	  && frame_offset.is_constant (&prev_offset))
2234 	{
2235 	  HOST_WIDE_INT offset, sz, redzonesz;
2236 	  redzonesz = ASAN_RED_ZONE_SIZE;
2237 	  sz = data.asan_vec[0] - prev_offset;
2238 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2239 	      && data.asan_alignb <= 4096
2240 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2241 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2242 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2243 	  /* Allocating a constant amount of space from a constant
2244 	     starting offset must give a constant result.  */
2245 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2246 		    .to_constant ());
2247 	  data.asan_vec.safe_push (prev_offset);
2248 	  data.asan_vec.safe_push (offset);
2249 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2250 	  if (STRICT_ALIGNMENT)
2251 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2252 				      << ASAN_SHADOW_SHIFT)
2253 				     / BITS_PER_UNIT, 1);
2254 
2255 	  var_end_seq
2256 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2257 					  data.asan_base,
2258 					  data.asan_alignb,
2259 					  data.asan_vec.address (),
2260 					  data.asan_decl_vec.address (),
2261 					  data.asan_vec.length ());
2262 	}
2263 
2264       expand_stack_vars (NULL, &data);
2265     }
2266 
2267   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2268     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2269 					      virtual_stack_vars_rtx,
2270 					      var_end_seq);
2271 
2272   fini_vars_expansion ();
2273 
2274   /* If there were any artificial non-ignored vars without rtl
2275      found earlier, see if deferred stack allocation hasn't assigned
2276      rtl to them.  */
2277   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2278     {
2279       rtx rtl = DECL_RTL_IF_SET (var);
2280 
2281       /* Keep artificial non-ignored vars in cfun->local_decls
2282 	 chain until instantiate_decls.  */
2283       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2284 	add_local_decl (cfun, var);
2285     }
2286 
2287   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2288   if (STACK_ALIGNMENT_NEEDED)
2289     {
2290       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2291       if (FRAME_GROWS_DOWNWARD)
2292 	frame_offset = aligned_lower_bound (frame_offset, align);
2293       else
2294 	frame_offset = aligned_upper_bound (frame_offset, align);
2295     }
2296 
2297   return var_end_seq;
2298 }
2299 
2300 
2301 /* If we need to produce a detailed dump, print the tree representation
2302    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2303    generated for STMT should have been appended.  */
2304 
2305 static void
2306 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2307 {
2308   if (dump_file && (dump_flags & TDF_DETAILS))
2309     {
2310       fprintf (dump_file, "\n;; ");
2311       print_gimple_stmt (dump_file, stmt, 0,
2312 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2313       fprintf (dump_file, "\n");
2314 
2315       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2316     }
2317 }
2318 
2319 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2320 
2321 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2322 
2323 /* Returns the label_rtx expression for a label starting basic block BB.  */
2324 
2325 static rtx_code_label *
2326 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2327 {
2328   gimple_stmt_iterator gsi;
2329   tree lab;
2330 
2331   if (bb->flags & BB_RTL)
2332     return block_label (bb);
2333 
2334   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2335   if (elt)
2336     return *elt;
2337 
2338   /* Find the tree label if it is present.  */
2339 
2340   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2341     {
2342       glabel *lab_stmt;
2343 
2344       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2345       if (!lab_stmt)
2346 	break;
2347 
2348       lab = gimple_label_label (lab_stmt);
2349       if (DECL_NONLOCAL (lab))
2350 	break;
2351 
2352       return jump_target_rtx (lab);
2353     }
2354 
2355   rtx_code_label *l = gen_label_rtx ();
2356   lab_rtx_for_bb->put (bb, l);
2357   return l;
2358 }
2359 
2360 
2361 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2362    of a basic block where we just expanded the conditional at the end,
2363    possibly clean up the CFG and instruction sequence.  LAST is the
2364    last instruction before the just emitted jump sequence.  */
2365 
2366 static void
2367 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2368 {
2369   /* Special case: when jumpif decides that the condition is
2370      trivial it emits an unconditional jump (and the necessary
2371      barrier).  But we still have two edges, the fallthru one is
2372      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2373      we have to insert insns (and split edges) before
2374      find_many_sub_basic_blocks and hence before purge_dead_edges.
2375      But splitting edges might create new blocks which depend on the
2376      fact that if there are two edges there's no barrier.  So the
2377      barrier would get lost and verify_flow_info would ICE.  Instead
2378      of auditing all edge splitters to care for the barrier (which
2379      normally isn't there in a cleaned CFG), fix it here.  */
2380   if (BARRIER_P (get_last_insn ()))
2381     {
2382       rtx_insn *insn;
2383       remove_edge (e);
2384       /* Now we have a single successor block.  If we have insns to
2385 	 insert on the remaining edge, we will potentially insert
2386 	 them at the end of this block (if the dest block isn't feasible)
2387 	 in order to avoid splitting the edge.  This insertion will take
2388 	 place in front of the last jump.  But we might have emitted
2389 	 multiple jumps (conditional and one unconditional) to the
2390 	 same destination.  Inserting in front of the last one then
2391 	 is a problem.  See PR 40021.  We fix this by deleting all
2392 	 jumps except the last unconditional one.  */
2393       insn = PREV_INSN (get_last_insn ());
2394       /* Make sure we have an unconditional jump.  Otherwise we're
2395 	 confused.  */
2396       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2397       for (insn = PREV_INSN (insn); insn != last;)
2398 	{
2399 	  insn = PREV_INSN (insn);
2400 	  if (JUMP_P (NEXT_INSN (insn)))
2401 	    {
2402 	      if (!any_condjump_p (NEXT_INSN (insn)))
2403 		{
2404 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2405 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2406 		}
2407 	      delete_insn (NEXT_INSN (insn));
2408 	    }
2409 	}
2410     }
2411 }
2412 
2413 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2414    Returns a new basic block if we've terminated the current basic
2415    block and created a new one.  */
2416 
2417 static basic_block
2418 expand_gimple_cond (basic_block bb, gcond *stmt)
2419 {
2420   basic_block new_bb, dest;
2421   edge true_edge;
2422   edge false_edge;
2423   rtx_insn *last2, *last;
2424   enum tree_code code;
2425   tree op0, op1;
2426 
2427   code = gimple_cond_code (stmt);
2428   op0 = gimple_cond_lhs (stmt);
2429   op1 = gimple_cond_rhs (stmt);
2430   /* We're sometimes presented with such code:
2431        D.123_1 = x < y;
2432        if (D.123_1 != 0)
2433          ...
2434      This would expand to two comparisons which then later might
2435      be cleaned up by combine.  But some pattern matchers like if-conversion
2436      work better when there's only one compare, so make up for this
2437      here as a special exception if TER would have made the same change.  */
2438   if (SA.values
2439       && TREE_CODE (op0) == SSA_NAME
2440       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2441       && TREE_CODE (op1) == INTEGER_CST
2442       && ((gimple_cond_code (stmt) == NE_EXPR
2443 	   && integer_zerop (op1))
2444 	  || (gimple_cond_code (stmt) == EQ_EXPR
2445 	      && integer_onep (op1)))
2446       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2447     {
2448       gimple *second = SSA_NAME_DEF_STMT (op0);
2449       if (gimple_code (second) == GIMPLE_ASSIGN)
2450 	{
2451 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2452 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2453 	    {
2454 	      code = code2;
2455 	      op0 = gimple_assign_rhs1 (second);
2456 	      op1 = gimple_assign_rhs2 (second);
2457 	    }
2458 	  /* If jumps are cheap and the target does not support conditional
2459 	     compare, turn some more codes into jumpy sequences.  */
2460 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2461 		   && targetm.gen_ccmp_first == NULL)
2462 	    {
2463 	      if ((code2 == BIT_AND_EXPR
2464 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2465 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2466 		  || code2 == TRUTH_AND_EXPR)
2467 		{
2468 		  code = TRUTH_ANDIF_EXPR;
2469 		  op0 = gimple_assign_rhs1 (second);
2470 		  op1 = gimple_assign_rhs2 (second);
2471 		}
2472 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2473 		{
2474 		  code = TRUTH_ORIF_EXPR;
2475 		  op0 = gimple_assign_rhs1 (second);
2476 		  op1 = gimple_assign_rhs2 (second);
2477 		}
2478 	    }
2479 	}
2480     }
2481 
2482   last2 = last = get_last_insn ();
2483 
2484   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2485   set_curr_insn_location (gimple_location (stmt));
2486 
2487   /* These flags have no purpose in RTL land.  */
2488   true_edge->flags &= ~EDGE_TRUE_VALUE;
2489   false_edge->flags &= ~EDGE_FALSE_VALUE;
2490 
2491   /* We can either have a pure conditional jump with one fallthru edge or
2492      two-way jump that needs to be decomposed into two basic blocks.  */
2493   if (false_edge->dest == bb->next_bb)
2494     {
2495       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2496 		true_edge->probability);
2497       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2498       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2499 	set_curr_insn_location (true_edge->goto_locus);
2500       false_edge->flags |= EDGE_FALLTHRU;
2501       maybe_cleanup_end_of_block (false_edge, last);
2502       return NULL;
2503     }
2504   if (true_edge->dest == bb->next_bb)
2505     {
2506       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2507 		   false_edge->probability);
2508       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2509       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2510 	set_curr_insn_location (false_edge->goto_locus);
2511       true_edge->flags |= EDGE_FALLTHRU;
2512       maybe_cleanup_end_of_block (true_edge, last);
2513       return NULL;
2514     }
2515 
2516   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2517 	    true_edge->probability);
2518   last = get_last_insn ();
2519   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2520     set_curr_insn_location (false_edge->goto_locus);
2521   emit_jump (label_rtx_for_bb (false_edge->dest));
2522 
2523   BB_END (bb) = last;
2524   if (BARRIER_P (BB_END (bb)))
2525     BB_END (bb) = PREV_INSN (BB_END (bb));
2526   update_bb_for_insn (bb);
2527 
2528   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2529   dest = false_edge->dest;
2530   redirect_edge_succ (false_edge, new_bb);
2531   false_edge->flags |= EDGE_FALLTHRU;
2532   new_bb->count = false_edge->count ();
2533   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2534   add_bb_to_loop (new_bb, loop);
2535   if (loop->latch == bb
2536       && loop->header == dest)
2537     loop->latch = new_bb;
2538   make_single_succ_edge (new_bb, dest, 0);
2539   if (BARRIER_P (BB_END (new_bb)))
2540     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2541   update_bb_for_insn (new_bb);
2542 
2543   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2544 
2545   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2546     {
2547       set_curr_insn_location (true_edge->goto_locus);
2548       true_edge->goto_locus = curr_insn_location ();
2549     }
2550 
2551   return new_bb;
2552 }
2553 
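/* Editorial illustration (not part of the original source): a sketch of the
   single-use boolean forwarding done above.  For source like

     int t = x < y;
     if (t)
       ...

   the GIMPLE condition "if (t_1 != 0)" is expanded directly as the
   comparison "x < y", provided t_1 is TERable (single use, defined by the
   comparison).  Similarly, when branches are cheap and the target has no
   conditional-compare expander, "t_1 = a_2 & b_3; if (t_1 != 0)" on 1-bit
   operands may be expanded as the short-circuit form "if (a_2) if (b_3)".
   End of editorial illustration.  */
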
2554 /* Mark all calls that can have a transaction restart.  */
2555 
2556 static void
2557 mark_transaction_restart_calls (gimple *stmt)
2558 {
2559   struct tm_restart_node dummy;
2560   tm_restart_node **slot;
2561 
2562   if (!cfun->gimple_df->tm_restart)
2563     return;
2564 
2565   dummy.stmt = stmt;
2566   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2567   if (slot)
2568     {
2569       struct tm_restart_node *n = *slot;
2570       tree list = n->label_or_list;
2571       rtx_insn *insn;
2572 
2573       for (insn = next_real_insn (get_last_insn ());
2574 	   !CALL_P (insn);
2575 	   insn = next_real_insn (insn))
2576 	continue;
2577 
2578       if (TREE_CODE (list) == LABEL_DECL)
2579 	add_reg_note (insn, REG_TM, label_rtx (list));
2580       else
2581 	for (; list ; list = TREE_CHAIN (list))
2582 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2583     }
2584 }
2585 
2586 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2587    statement STMT.  */
2588 
2589 static void
2590 expand_call_stmt (gcall *stmt)
2591 {
2592   tree exp, decl, lhs;
2593   bool builtin_p;
2594   size_t i;
2595 
2596   if (gimple_call_internal_p (stmt))
2597     {
2598       expand_internal_call (stmt);
2599       return;
2600     }
2601 
2602   /* If this is a call to a built-in function and it has no effect other
2603      than setting the lhs, try to implement it using an internal function
2604      instead.  */
2605   decl = gimple_call_fndecl (stmt);
2606   if (gimple_call_lhs (stmt)
2607       && !gimple_has_side_effects (stmt)
2608       && (optimize || (decl && called_as_built_in (decl))))
2609     {
2610       internal_fn ifn = replacement_internal_fn (stmt);
2611       if (ifn != IFN_LAST)
2612 	{
2613 	  expand_internal_call (ifn, stmt);
2614 	  return;
2615 	}
2616     }
2617 
2618   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2619 
2620   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2621   builtin_p = decl && DECL_BUILT_IN (decl);
2622 
2623   /* If this is not a builtin function, the function type through which the
2624      call is made may be different from the type of the function.  */
2625   if (!builtin_p)
2626     CALL_EXPR_FN (exp)
2627       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2628 		      CALL_EXPR_FN (exp));
2629 
2630   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2631   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2632 
2633   for (i = 0; i < gimple_call_num_args (stmt); i++)
2634     {
2635       tree arg = gimple_call_arg (stmt, i);
2636       gimple *def;
2637       /* We TER addresses into arguments of builtin functions so we have a
2638 	 chance to infer more correct alignment information.  See PR39954.  */
2639       if (builtin_p
2640 	  && TREE_CODE (arg) == SSA_NAME
2641 	  && (def = get_gimple_for_ssa_name (arg))
2642 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2643 	arg = gimple_assign_rhs1 (def);
2644       CALL_EXPR_ARG (exp, i) = arg;
2645     }
2646 
2647   if (gimple_has_side_effects (stmt))
2648     TREE_SIDE_EFFECTS (exp) = 1;
2649 
2650   if (gimple_call_nothrow_p (stmt))
2651     TREE_NOTHROW (exp) = 1;
2652 
2653   if (gimple_no_warning_p (stmt))
2654     TREE_NO_WARNING (exp) = 1;
2655 
2656   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2657   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2658   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2659   if (decl
2660       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2661       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2662     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2663   else
2664     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2665   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2666   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2667   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2668   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2669 
2670   /* Ensure RTL is created for debug args.  */
2671   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2672     {
2673       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2674       unsigned int ix;
2675       tree dtemp;
2676 
2677       if (debug_args)
2678 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2679 	  {
2680 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2681 	    expand_debug_expr (dtemp);
2682 	  }
2683     }
2684 
2685   rtx_insn *before_call = get_last_insn ();
2686   lhs = gimple_call_lhs (stmt);
2687   if (lhs)
2688     expand_assignment (lhs, exp, false);
2689   else
2690     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2691 
2692   /* If the gimple call is an indirect call and has the 'nocf_check'
2693      attribute, find the generated CALL insn and mark it so that no
2694      control-flow verification is needed.  */
2695   if (gimple_call_nocf_check_p (stmt)
2696       && !gimple_call_fndecl (stmt))
2697     {
2698       rtx_insn *last = get_last_insn ();
2699       while (!CALL_P (last)
2700 	     && last != before_call)
2701 	last = PREV_INSN (last);
2702 
2703       if (last != before_call)
2704 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2705     }
2706 
2707   mark_transaction_restart_calls (stmt);
2708 }
2709 
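/* Editorial illustration (not part of the original source): the
   internal-function replacement above means that, for example,

     y = __builtin_sqrt (x);

   may be expanded through an internal function (IFN_SQRT) rather than as a
   library call, when the statement has no side effects beyond setting the
   lhs and the target provides a suitable expander; otherwise the CALL_EXPR
   path below is used.  End of editorial illustration.  */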
2710 
2711 /* Generate RTL for an asm statement (explicit assembler code).
2712    STRING is a STRING_CST node containing the assembler code text,
2713    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2714    insn is volatile; don't optimize it.  */
2715 
2716 static void
2717 expand_asm_loc (tree string, int vol, location_t locus)
2718 {
2719   rtx body;
2720 
2721   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2722 				ggc_strdup (TREE_STRING_POINTER (string)),
2723 				locus);
2724 
2725   MEM_VOLATILE_P (body) = vol;
2726 
2727   /* Non-empty basic ASM implicitly clobbers memory.  */
2728   if (TREE_STRING_LENGTH (string) != 0)
2729     {
2730       rtx asm_op, clob;
2731       unsigned i, nclobbers;
2732       auto_vec<rtx> input_rvec, output_rvec;
2733       auto_vec<const char *> constraints;
2734       auto_vec<rtx> clobber_rvec;
2735       HARD_REG_SET clobbered_regs;
2736       CLEAR_HARD_REG_SET (clobbered_regs);
2737 
2738       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2739       clobber_rvec.safe_push (clob);
2740 
2741       if (targetm.md_asm_adjust)
2742 	targetm.md_asm_adjust (output_rvec, input_rvec,
2743 			       constraints, clobber_rvec,
2744 			       clobbered_regs);
2745 
2746       asm_op = body;
2747       nclobbers = clobber_rvec.length ();
2748       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2749 
2750       XVECEXP (body, 0, 0) = asm_op;
2751       for (i = 0; i < nclobbers; i++)
2752 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2753     }
2754 
2755   emit_insn (body);
2756 }
2757 
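/* Editorial illustration (not part of the original source): a basic asm
   such as

     asm volatile ("nop");

   reaches expand_asm_loc.  Because the template string is non-empty, the
   resulting insn is a PARALLEL of the ASM_INPUT plus an implicit memory
   clobber (and whatever extra clobbers targetm.md_asm_adjust adds).
   End of editorial illustration.  */
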
2758 /* Return the number of times character C occurs in string S.  */
2759 static int
2760 n_occurrences (int c, const char *s)
2761 {
2762   int n = 0;
2763   while (*s)
2764     n += (*s++ == c);
2765   return n;
2766 }
2767 
2768 /* A subroutine of expand_asm_operands.  Check that all operands have
2769    the same number of alternatives.  Return true if so.  */
2770 
2771 static bool
2772 check_operand_nalternatives (const vec<const char *> &constraints)
2773 {
2774   unsigned len = constraints.length();
2775   if (len > 0)
2776     {
2777       int nalternatives = n_occurrences (',', constraints[0]);
2778 
2779       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2780 	{
2781 	  error ("too many alternatives in %<asm%>");
2782 	  return false;
2783 	}
2784 
2785       for (unsigned i = 1; i < len; ++i)
2786 	if (n_occurrences (',', constraints[i]) != nalternatives)
2787 	  {
2788 	    error ("operand constraints for %<asm%> differ "
2789 		   "in number of alternatives");
2790 	    return false;
2791 	  }
2792     }
2793   return true;
2794 }
2795 
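/* Editorial illustration (not part of the original source): the check above
   rejects an asm whose constraints disagree on the number of alternatives,
   e.g.

     asm ("..." : "=r,m" (out) : "r" (in));   // 2 alternatives vs. 1: error

   while "=r,m" together with "r,r" would be accepted.
   End of editorial illustration.  */
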
2796 /* Check for overlap between registers marked in CLOBBERED_REGS and
2797    anything inappropriate in T.  Emit an error and return true if a
2798    conflict is found, false if there is no conflict.  */
2799 
2800 static bool
2801 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2802 {
2803   /* Conflicts between asm-declared register variables and the clobber
2804      list are not allowed.  */
2805   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2806 
2807   if (overlap)
2808     {
2809       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2810 	     DECL_NAME (overlap));
2811 
2812       /* Reset registerness to stop multiple errors emitted for a single
2813 	 variable.  */
2814       DECL_REGISTER (overlap) = 0;
2815       return true;
2816     }
2817 
2818   return false;
2819 }
2820 
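/* Editorial illustration (not part of the original source): the conflict
   checked above arises for code along these lines, where REGNAME is a
   target-specific placeholder for a hard register name.

     register int r asm ("REGNAME");
     asm ("..." : "=r" (r) : : "REGNAME");   // asm-specifier conflicts with
                                             // the clobber list: error

   End of editorial illustration.  */
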
2821 /* Generate RTL for an asm statement with arguments.
2822    STRING is the instruction template.
2823    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2824    Each output or input has an expression in the TREE_VALUE and
2825    a tree list in TREE_PURPOSE which in turn contains a constraint
2826    name in TREE_VALUE (or NULL_TREE) and a constraint string
2827    in TREE_PURPOSE.
2828    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2829    that is clobbered by this insn.
2830 
2831    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2832    should be the fallthru basic block of the asm goto.
2833 
2834    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2835    Some elements of OUTPUTS may be replaced with trees representing temporary
2836    values.  The caller should copy those temporary values to the originally
2837    specified lvalues.
2838 
2839    VOL nonzero means the insn is volatile; don't optimize it.  */
2840 
2841 static void
2842 expand_asm_stmt (gasm *stmt)
2843 {
2844   class save_input_location
2845   {
2846     location_t old;
2847 
2848   public:
2849     explicit save_input_location(location_t where)
2850     {
2851       old = input_location;
2852       input_location = where;
2853     }
2854 
2855     ~save_input_location()
2856     {
2857       input_location = old;
2858     }
2859   };
2860 
2861   location_t locus = gimple_location (stmt);
2862 
2863   if (gimple_asm_input_p (stmt))
2864     {
2865       const char *s = gimple_asm_string (stmt);
2866       tree string = build_string (strlen (s), s);
2867       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2868       return;
2869     }
2870 
2871   /* There are some legacy diagnostics in here, and this also avoids a
2872      sixth parameter to targetm.md_asm_adjust.  */
2873   save_input_location s_i_l(locus);
2874 
2875   unsigned noutputs = gimple_asm_noutputs (stmt);
2876   unsigned ninputs = gimple_asm_ninputs (stmt);
2877   unsigned nlabels = gimple_asm_nlabels (stmt);
2878   unsigned i;
2879 
2880   /* ??? Diagnose during gimplification?  */
2881   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2882     {
2883       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2884       return;
2885     }
2886 
2887   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2888   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2889   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2890 
2891   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2892 
2893   output_tvec.safe_grow (noutputs);
2894   input_tvec.safe_grow (ninputs);
2895   constraints.safe_grow (noutputs + ninputs);
2896 
2897   for (i = 0; i < noutputs; ++i)
2898     {
2899       tree t = gimple_asm_output_op (stmt, i);
2900       output_tvec[i] = TREE_VALUE (t);
2901       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2902     }
2903   for (i = 0; i < ninputs; i++)
2904     {
2905       tree t = gimple_asm_input_op (stmt, i);
2906       input_tvec[i] = TREE_VALUE (t);
2907       constraints[i + noutputs]
2908 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2909     }
2910 
2911   /* ??? Diagnose during gimplification?  */
2912   if (! check_operand_nalternatives (constraints))
2913     return;
2914 
2915   /* Count the number of meaningful clobbered registers, ignoring what
2916      we would ignore later.  */
2917   auto_vec<rtx> clobber_rvec;
2918   HARD_REG_SET clobbered_regs;
2919   CLEAR_HARD_REG_SET (clobbered_regs);
2920 
2921   if (unsigned n = gimple_asm_nclobbers (stmt))
2922     {
2923       clobber_rvec.reserve (n);
2924       for (i = 0; i < n; i++)
2925 	{
2926 	  tree t = gimple_asm_clobber_op (stmt, i);
2927           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2928 	  int nregs, j;
2929 
2930 	  j = decode_reg_name_and_count (regname, &nregs);
2931 	  if (j < 0)
2932 	    {
2933 	      if (j == -2)
2934 		{
2935 		  /* ??? Diagnose during gimplification?  */
2936 		  error ("unknown register name %qs in %<asm%>", regname);
2937 		}
2938 	      else if (j == -4)
2939 		{
2940 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2941 		  clobber_rvec.safe_push (x);
2942 		}
2943 	      else
2944 		{
2945 		  /* Otherwise we should have -1 == empty string
2946 		     or -3 == cc, which is not a register.  */
2947 		  gcc_assert (j == -1 || j == -3);
2948 		}
2949 	    }
2950 	  else
2951 	    for (int reg = j; reg < j + nregs; reg++)
2952 	      {
2953 		/* Clobbering the PIC register is an error.  */
2954 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2955 		  {
2956 		    /* ??? Diagnose during gimplification?  */
2957 		    error ("PIC register clobbered by %qs in %<asm%>",
2958 			   regname);
2959 		    return;
2960 		  }
2961 
2962 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2963 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2964 		clobber_rvec.safe_push (x);
2965 	      }
2966 	}
2967     }
2968   unsigned nclobbers = clobber_rvec.length();
2969 
2970   /* First pass over inputs and outputs checks validity and sets
2971      mark_addressable if needed.  */
2972   /* ??? Diagnose during gimplification?  */
2973 
2974   for (i = 0; i < noutputs; ++i)
2975     {
2976       tree val = output_tvec[i];
2977       tree type = TREE_TYPE (val);
2978       const char *constraint;
2979       bool is_inout;
2980       bool allows_reg;
2981       bool allows_mem;
2982 
2983       /* Try to parse the output constraint.  If that fails, there's
2984 	 no point in going further.  */
2985       constraint = constraints[i];
2986       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2987 				    &allows_mem, &allows_reg, &is_inout))
2988 	return;
2989 
2990       if (! allows_reg
2991 	  && (allows_mem
2992 	      || is_inout
2993 	      || (DECL_P (val)
2994 		  && REG_P (DECL_RTL (val))
2995 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2996 	mark_addressable (val);
2997     }
2998 
2999   for (i = 0; i < ninputs; ++i)
3000     {
3001       bool allows_reg, allows_mem;
3002       const char *constraint;
3003 
3004       constraint = constraints[i + noutputs];
3005       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3006 				    constraints.address (),
3007 				    &allows_mem, &allows_reg))
3008 	return;
3009 
3010       if (! allows_reg && allows_mem)
3011 	mark_addressable (input_tvec[i]);
3012     }
3013 
3014   /* Second pass evaluates arguments.  */
3015 
3016   /* Make sure stack is consistent for asm goto.  */
3017   if (nlabels > 0)
3018     do_pending_stack_adjust ();
3019   int old_generating_concat_p = generating_concat_p;
3020 
3021   /* Vector of RTX's of evaluated output operands.  */
3022   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3023   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3024   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3025 
3026   output_rvec.safe_grow (noutputs);
3027 
3028   for (i = 0; i < noutputs; ++i)
3029     {
3030       tree val = output_tvec[i];
3031       tree type = TREE_TYPE (val);
3032       bool is_inout, allows_reg, allows_mem, ok;
3033       rtx op;
3034 
3035       ok = parse_output_constraint (&constraints[i], i, ninputs,
3036 				    noutputs, &allows_mem, &allows_reg,
3037 				    &is_inout);
3038       gcc_assert (ok);
3039 
3040       /* If an output operand is not a decl or indirect ref and our constraint
3041 	 allows a register, make a temporary to act as an intermediate.
3042 	 Make the asm insn write into that, then we will copy it to
3043 	 the real output operand.  Likewise for promoted variables.  */
3044 
3045       generating_concat_p = 0;
3046 
3047       if ((TREE_CODE (val) == INDIRECT_REF
3048 	   && allows_mem)
3049 	  || (DECL_P (val)
3050 	      && (allows_mem || REG_P (DECL_RTL (val)))
3051 	      && ! (REG_P (DECL_RTL (val))
3052 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3053 	  || ! allows_reg
3054 	  || is_inout)
3055 	{
3056 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3057 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3058 	  if (MEM_P (op))
3059 	    op = validize_mem (op);
3060 
3061 	  if (! allows_reg && !MEM_P (op))
3062 	    error ("output number %d not directly addressable", i);
3063 	  if ((! allows_mem && MEM_P (op))
3064 	      || GET_CODE (op) == CONCAT)
3065 	    {
3066 	      rtx old_op = op;
3067 	      op = gen_reg_rtx (GET_MODE (op));
3068 
3069 	      generating_concat_p = old_generating_concat_p;
3070 
3071 	      if (is_inout)
3072 		emit_move_insn (op, old_op);
3073 
3074 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3075 	      emit_move_insn (old_op, op);
3076 	      after_rtl_seq = get_insns ();
3077 	      after_rtl_end = get_last_insn ();
3078 	      end_sequence ();
3079 	    }
3080 	}
3081       else
3082 	{
3083 	  op = assign_temp (type, 0, 1);
3084 	  op = validize_mem (op);
3085 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3086 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3087 
3088 	  generating_concat_p = old_generating_concat_p;
3089 
3090 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3091 	  expand_assignment (val, make_tree (type, op), false);
3092 	  after_rtl_seq = get_insns ();
3093 	  after_rtl_end = get_last_insn ();
3094 	  end_sequence ();
3095 	}
3096       output_rvec[i] = op;
3097 
3098       if (is_inout)
3099 	inout_opnum.safe_push (i);
3100     }
3101 
3102   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3103   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3104 
3105   input_rvec.safe_grow (ninputs);
3106   input_mode.safe_grow (ninputs);
3107 
3108   generating_concat_p = 0;
3109 
3110   for (i = 0; i < ninputs; ++i)
3111     {
3112       tree val = input_tvec[i];
3113       tree type = TREE_TYPE (val);
3114       bool allows_reg, allows_mem, ok;
3115       const char *constraint;
3116       rtx op;
3117 
3118       constraint = constraints[i + noutputs];
3119       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3120 				   constraints.address (),
3121 				   &allows_mem, &allows_reg);
3122       gcc_assert (ok);
3123 
3124       /* EXPAND_INITIALIZER will not generate code for valid initializer
3125 	 constants, but will still generate code for other types of operand.
3126 	 This is the behavior we want for constant constraints.  */
3127       op = expand_expr (val, NULL_RTX, VOIDmode,
3128 			allows_reg ? EXPAND_NORMAL
3129 			: allows_mem ? EXPAND_MEMORY
3130 			: EXPAND_INITIALIZER);
3131 
3132       /* Never pass a CONCAT to an ASM.  */
3133       if (GET_CODE (op) == CONCAT)
3134 	op = force_reg (GET_MODE (op), op);
3135       else if (MEM_P (op))
3136 	op = validize_mem (op);
3137 
3138       if (asm_operand_ok (op, constraint, NULL) <= 0)
3139 	{
3140 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3141 	    op = force_reg (TYPE_MODE (type), op);
3142 	  else if (!allows_mem)
3143 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3144 		     i + noutputs);
3145 	  else if (MEM_P (op))
3146 	    {
3147 	      /* We won't recognize either volatile memory or memory
3148 		 with a queued address as a valid memory_operand
3149 		 at this point.  Ignore it: clearly this *is* a memory.  */
3150 	    }
3151 	  else
3152 	    gcc_unreachable ();
3153 	}
3154       input_rvec[i] = op;
3155       input_mode[i] = TYPE_MODE (type);
3156     }
3157 
3158   /* For in-out operands, copy output rtx to input rtx.  */
3159   unsigned ninout = inout_opnum.length();
3160   for (i = 0; i < ninout; i++)
3161     {
3162       int j = inout_opnum[i];
3163       rtx o = output_rvec[j];
3164 
3165       input_rvec.safe_push (o);
3166       input_mode.safe_push (GET_MODE (o));
3167 
3168       char buffer[16];
3169       sprintf (buffer, "%d", j);
3170       constraints.safe_push (ggc_strdup (buffer));
3171     }
3172   ninputs += ninout;
3173 
3174   /* Sometimes we wish to automatically clobber registers across an asm.
3175      Case in point is when the i386 backend moved from cc0 to a hard reg --
3176      maintaining source-level compatibility means automatically clobbering
3177      the flags register.  */
3178   rtx_insn *after_md_seq = NULL;
3179   if (targetm.md_asm_adjust)
3180     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3181 					  constraints, clobber_rvec,
3182 					  clobbered_regs);
3183 
3184   /* Do not allow the hook to change the output and input count,
3185      lest it mess up the operand numbering.  */
3186   gcc_assert (output_rvec.length() == noutputs);
3187   gcc_assert (input_rvec.length() == ninputs);
3188   gcc_assert (constraints.length() == noutputs + ninputs);
3189 
3190   /* But it certainly can adjust the clobbers.  */
3191   nclobbers = clobber_rvec.length();
3192 
3193   /* Third pass checks for easy conflicts.  */
3194   /* ??? Why are we doing this on trees instead of rtx.  */
3195 
3196   bool clobber_conflict_found = 0;
3197   for (i = 0; i < noutputs; ++i)
3198     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3199 	clobber_conflict_found = 1;
3200   for (i = 0; i < ninputs - ninout; ++i)
3201     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3202 	clobber_conflict_found = 1;
3203 
3204   /* Make vectors for the expression-rtx, constraint strings,
3205      and named operands.  */
3206 
3207   rtvec argvec = rtvec_alloc (ninputs);
3208   rtvec constraintvec = rtvec_alloc (ninputs);
3209   rtvec labelvec = rtvec_alloc (nlabels);
3210 
3211   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3212 				    : GET_MODE (output_rvec[0])),
3213 				   ggc_strdup (gimple_asm_string (stmt)),
3214 				   "", 0, argvec, constraintvec,
3215 				   labelvec, locus);
3216   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3217 
3218   for (i = 0; i < ninputs; ++i)
3219     {
3220       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3221       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3222 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3223 				 constraints[i + noutputs],
3224 				 locus);
3225     }
3226 
3227   /* Copy labels to the vector.  */
3228   rtx_code_label *fallthru_label = NULL;
3229   if (nlabels > 0)
3230     {
3231       basic_block fallthru_bb = NULL;
3232       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3233       if (fallthru)
3234 	fallthru_bb = fallthru->dest;
3235 
3236       for (i = 0; i < nlabels; ++i)
3237 	{
3238 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3239 	  rtx_insn *r;
3240 	  /* If asm goto has any labels in the fallthru basic block, use
3241 	     a label that we emit immediately after the asm goto.  Expansion
3242 	     may insert further instructions into the same basic block after
3243 	     asm goto and if we don't do this, insertion of instructions on
3244 	     the fallthru edge might misbehave.  See PR58670.  */
3245 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3246 	    {
3247 	      if (fallthru_label == NULL_RTX)
3248 	        fallthru_label = gen_label_rtx ();
3249 	      r = fallthru_label;
3250 	    }
3251 	  else
3252 	    r = label_rtx (label);
3253 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3254 	}
3255     }
3256 
3257   /* Now, for each output, construct an rtx
3258      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3259 			       ARGVEC CONSTRAINTS OPNAMES))
3260      If there is more than one, put them inside a PARALLEL.  */
3261 
3262   if (nlabels > 0 && nclobbers == 0)
3263     {
3264       gcc_assert (noutputs == 0);
3265       emit_jump_insn (body);
3266     }
3267   else if (noutputs == 0 && nclobbers == 0)
3268     {
3269       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3270       emit_insn (body);
3271     }
3272   else if (noutputs == 1 && nclobbers == 0)
3273     {
3274       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3275       emit_insn (gen_rtx_SET (output_rvec[0], body));
3276     }
3277   else
3278     {
3279       rtx obody = body;
3280       int num = noutputs;
3281 
3282       if (num == 0)
3283 	num = 1;
3284 
3285       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3286 
3287       /* For each output operand, store a SET.  */
3288       for (i = 0; i < noutputs; ++i)
3289 	{
3290 	  rtx src, o = output_rvec[i];
3291 	  if (i == 0)
3292 	    {
3293 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3294 	      src = obody;
3295 	    }
3296 	  else
3297 	    {
3298 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3299 					  ASM_OPERANDS_TEMPLATE (obody),
3300 					  constraints[i], i, argvec,
3301 					  constraintvec, labelvec, locus);
3302 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3303 	    }
3304 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3305 	}
3306 
3307       /* If there are no outputs (but there are some clobbers)
3308 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3309       if (i == 0)
3310 	XVECEXP (body, 0, i++) = obody;
3311 
3312       /* Store (clobber REG) for each clobbered register specified.  */
3313       for (unsigned j = 0; j < nclobbers; ++j)
3314 	{
3315 	  rtx clobbered_reg = clobber_rvec[j];
3316 
3317 	  /* Sanity check for overlap between clobbers and inputs or
3318 	     outputs that hasn't been handled.  Such overlap should have
3319 	     been detected and reported above.  */
3320 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3321 	    {
3322 	      /* We test the old body (obody) contents to avoid
3323 		 tripping over the under-construction body.  */
3324 	      for (unsigned k = 0; k < noutputs; ++k)
3325 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3326 		  internal_error ("asm clobber conflict with output operand");
3327 
3328 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3329 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3330 		  internal_error ("asm clobber conflict with input operand");
3331 	    }
3332 
3333 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3334 	}
3335 
3336       if (nlabels > 0)
3337 	emit_jump_insn (body);
3338       else
3339 	emit_insn (body);
3340     }
3341 
3342   generating_concat_p = old_generating_concat_p;
3343 
3344   if (fallthru_label)
3345     emit_label (fallthru_label);
3346 
3347   if (after_md_seq)
3348     emit_insn (after_md_seq);
3349   if (after_rtl_seq)
3350     emit_insn (after_rtl_seq);
3351 
3352   free_temp_slots ();
3353   crtl->has_asm_statement = 1;
3354 }
3355 
3356 /* Emit code to jump to the address
3357    specified by the pointer expression EXP.  */
3358 
3359 static void
3360 expand_computed_goto (tree exp)
3361 {
3362   rtx x = expand_normal (exp);
3363 
3364   do_pending_stack_adjust ();
3365   emit_indirect_jump (x);
3366 }
3367 
3368 /* Generate RTL code for a `goto' statement with target label LABEL.
3369    LABEL should be a LABEL_DECL tree node that was or will later be
3370    defined with `expand_label'.  */
3371 
3372 static void
3373 expand_goto (tree label)
3374 {
3375   if (flag_checking)
3376     {
3377       /* Check for a nonlocal goto to a containing function.  Should have
3378 	 gotten translated to __builtin_nonlocal_goto.  */
3379       tree context = decl_function_context (label);
3380       gcc_assert (!context || context == current_function_decl);
3381     }
3382 
3383   emit_jump (jump_target_rtx (label));
3384 }
3385 
3386 /* Output a return with no value.  */
3387 
3388 static void
3389 expand_null_return_1 (void)
3390 {
3391   clear_pending_stack_adjust ();
3392   do_pending_stack_adjust ();
3393   emit_jump (return_label);
3394 }
3395 
3396 /* Generate RTL to return from the current function, with no value.
3397    (That is, we do not do anything about returning any value.)  */
3398 
3399 void
3400 expand_null_return (void)
3401 {
3402   /* If this function was declared to return a value, but we
3403      didn't, clobber the return registers so that they are not
3404      propagated live to the rest of the function.  */
3405   clobber_return_register ();
3406 
3407   expand_null_return_1 ();
3408 }
3409 
3410 /* Generate RTL to return from the current function, with value VAL.  */
3411 
3412 static void
3413 expand_value_return (rtx val)
3414 {
3415   /* Copy the value to the return location unless it's already there.  */
3416 
3417   tree decl = DECL_RESULT (current_function_decl);
3418   rtx return_reg = DECL_RTL (decl);
3419   if (return_reg != val)
3420     {
3421       tree funtype = TREE_TYPE (current_function_decl);
3422       tree type = TREE_TYPE (decl);
3423       int unsignedp = TYPE_UNSIGNED (type);
3424       machine_mode old_mode = DECL_MODE (decl);
3425       machine_mode mode;
3426       if (DECL_BY_REFERENCE (decl))
3427         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3428       else
3429         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3430 
3431       if (mode != old_mode)
3432 	val = convert_modes (mode, old_mode, val, unsignedp);
3433 
3434       if (GET_CODE (return_reg) == PARALLEL)
3435 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3436       else
3437 	emit_move_insn (return_reg, val);
3438     }
3439 
3440   expand_null_return_1 ();
3441 }
3442 
3443 /* Generate RTL to evaluate the expression RETVAL and return it
3444    from the current function.  */
3445 
3446 static void
3447 expand_return (tree retval, tree bounds)
3448 {
3449   rtx result_rtl;
3450   rtx val = 0;
3451   tree retval_rhs;
3452   rtx bounds_rtl;
3453 
3454   /* If function wants no value, give it none.  */
3455   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3456     {
3457       expand_normal (retval);
3458       expand_null_return ();
3459       return;
3460     }
3461 
3462   if (retval == error_mark_node)
3463     {
3464       /* Treat this like a return of no value from a function that
3465 	 returns a value.  */
3466       expand_null_return ();
3467       return;
3468     }
3469   else if ((TREE_CODE (retval) == MODIFY_EXPR
3470 	    || TREE_CODE (retval) == INIT_EXPR)
3471 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3472     retval_rhs = TREE_OPERAND (retval, 1);
3473   else
3474     retval_rhs = retval;
3475 
3476   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3477 
3478   /* Put returned bounds to the right place.  */
3479   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3480   if (bounds_rtl)
3481     {
3482       rtx addr = NULL;
3483       rtx bnd = NULL;
3484 
3485       if (bounds && bounds != error_mark_node)
3486 	{
3487 	  bnd = expand_normal (bounds);
3488 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3489 	}
3490       else if (REG_P (bounds_rtl))
3491 	{
3492 	  if (bounds)
3493 	    bnd = chkp_expand_zero_bounds ();
3494 	  else
3495 	    {
3496 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3497 	      addr = gen_rtx_MEM (Pmode, addr);
3498 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3499 	    }
3500 
3501 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3502 	}
3503       else
3504 	{
3505 	  int n;
3506 
3507 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3508 
3509 	  if (bounds)
3510 	    bnd = chkp_expand_zero_bounds ();
3511 	  else
3512 	    {
3513 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3514 	      addr = gen_rtx_MEM (Pmode, addr);
3515 	    }
3516 
3517 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3518 	    {
3519 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3520 	      if (!bounds)
3521 		{
3522 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3523 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3524 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3525 		}
3526 	      targetm.calls.store_returned_bounds (slot, bnd);
3527 	    }
3528 	}
3529     }
3530   else if (chkp_function_instrumented_p (current_function_decl)
3531 	   && !BOUNDED_P (retval_rhs)
3532 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3533 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3534     {
3535       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3536       addr = gen_rtx_MEM (Pmode, addr);
3537 
3538       gcc_assert (MEM_P (result_rtl));
3539 
3540       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3541     }
3542 
3543   /* If we are returning the RESULT_DECL, then the value has already
3544      been stored into it, so we don't have to do anything special.  */
3545   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3546     expand_value_return (result_rtl);
3547 
3548   /* If the result is an aggregate that is being returned in one (or more)
3549      registers, load the registers here.  */
3550 
3551   else if (retval_rhs != 0
3552 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3553 	   && REG_P (result_rtl))
3554     {
3555       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3556       if (val)
3557 	{
3558 	  /* Use the mode of the result value on the return register.  */
3559 	  PUT_MODE (result_rtl, GET_MODE (val));
3560 	  expand_value_return (val);
3561 	}
3562       else
3563 	expand_null_return ();
3564     }
3565   else if (retval_rhs != 0
3566 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3567 	   && (REG_P (result_rtl)
3568 	       || (GET_CODE (result_rtl) == PARALLEL)))
3569     {
3570       /* Compute the return value into a temporary (usually a pseudo reg).  */
3571       val
3572 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3573       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3574       val = force_not_mem (val);
3575       expand_value_return (val);
3576     }
3577   else
3578     {
3579       /* No hard reg used; calculate value into hard return reg.  */
3580       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581       expand_value_return (result_rtl);
3582     }
3583 }
3584 
3585 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3586    STMT that doesn't require special handling for outgoing edges.  That
3587    is, no tailcalls and no GIMPLE_COND.  */
3588 
3589 static void
3590 expand_gimple_stmt_1 (gimple *stmt)
3591 {
3592   tree op0;
3593 
3594   set_curr_insn_location (gimple_location (stmt));
3595 
3596   switch (gimple_code (stmt))
3597     {
3598     case GIMPLE_GOTO:
3599       op0 = gimple_goto_dest (stmt);
3600       if (TREE_CODE (op0) == LABEL_DECL)
3601 	expand_goto (op0);
3602       else
3603 	expand_computed_goto (op0);
3604       break;
3605     case GIMPLE_LABEL:
3606       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3607       break;
3608     case GIMPLE_NOP:
3609     case GIMPLE_PREDICT:
3610       break;
3611     case GIMPLE_SWITCH:
3612       {
3613 	gswitch *swtch = as_a <gswitch *> (stmt);
3614 	if (gimple_switch_num_labels (swtch) == 1)
3615 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3616 	else
3617 	  expand_case (swtch);
3618       }
3619       break;
3620     case GIMPLE_ASM:
3621       expand_asm_stmt (as_a <gasm *> (stmt));
3622       break;
3623     case GIMPLE_CALL:
3624       expand_call_stmt (as_a <gcall *> (stmt));
3625       break;
3626 
3627     case GIMPLE_RETURN:
3628       {
3629 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3630 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3631 
3632 	if (op0 && op0 != error_mark_node)
3633 	  {
3634 	    tree result = DECL_RESULT (current_function_decl);
3635 
3636 	    /* Mark we have return statement with missing bounds.  */
3637 	    if (!bnd
3638 		&& chkp_function_instrumented_p (cfun->decl)
3639 		&& !DECL_P (op0))
3640 	      bnd = error_mark_node;
3641 
3642 	    /* If we are not returning the current function's RESULT_DECL,
3643 	       build an assignment to it.  */
3644 	    if (op0 != result)
3645 	      {
3646 		/* I believe that a function's RESULT_DECL is unique.  */
3647 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3648 
3649 		/* ??? We'd like to use simply expand_assignment here,
3650 		   but this fails if the value is of BLKmode but the return
3651 		   decl is a register.  expand_return has special handling
3652 		   for this combination, which eventually should move
3653 		   to common code.  See comments there.  Until then, let's
3654 		   build a modify expression :-/  */
3655 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3656 			      result, op0);
3657 	      }
3658 	  }
3659 
3660 	if (!op0)
3661 	  expand_null_return ();
3662 	else
3663 	  expand_return (op0, bnd);
3664       }
3665       break;
3666 
3667     case GIMPLE_ASSIGN:
3668       {
3669 	gassign *assign_stmt = as_a <gassign *> (stmt);
3670 	tree lhs = gimple_assign_lhs (assign_stmt);
3671 
3672 	/* Tree expand used to fiddle with |= and &= of two bitfield
3673 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3674 	   of binary assigns must be a gimple reg.  */
3675 
3676 	if (TREE_CODE (lhs) != SSA_NAME
3677 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3678 	       == GIMPLE_SINGLE_RHS)
3679 	  {
3680 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3681 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3682 			== GIMPLE_SINGLE_RHS);
3683 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3684 		/* Do not put locations on possibly shared trees.  */
3685 		&& !is_gimple_min_invariant (rhs))
3686 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3687 	    if (TREE_CLOBBER_P (rhs))
3688 	      /* This is a clobber to mark the going out of scope for
3689 		 this LHS.  */
3690 	      ;
3691 	    else
3692 	      expand_assignment (lhs, rhs,
3693 				 gimple_assign_nontemporal_move_p (
3694 				   assign_stmt));
3695 	  }
3696 	else
3697 	  {
3698 	    rtx target, temp;
3699 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3700 	    struct separate_ops ops;
3701 	    bool promoted = false;
3702 
3703 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3704 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3705 	      promoted = true;
3706 
3707 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3708 	    ops.type = TREE_TYPE (lhs);
3709 	    switch (get_gimple_rhs_class (ops.code))
3710 	      {
3711 		case GIMPLE_TERNARY_RHS:
3712 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3713 		  /* Fallthru */
3714 		case GIMPLE_BINARY_RHS:
3715 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3716 		  /* Fallthru */
3717 		case GIMPLE_UNARY_RHS:
3718 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3719 		  break;
3720 		default:
3721 		  gcc_unreachable ();
3722 	      }
3723 	    ops.location = gimple_location (stmt);
3724 
3725 	    /* If we want to use a nontemporal store, force the value
3726 	       into a register first.  If we store into a promoted register,
3727 	       don't directly expand to target.  */
3728 	    temp = nontemporal || promoted ? NULL_RTX : target;
3729 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3730 				       EXPAND_NORMAL);
3731 
3732 	    if (temp == target)
3733 	      ;
3734 	    else if (promoted)
3735 	      {
3736 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3737 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3738 		   sure that we properly convert it.  */
3739 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3740 		  {
3741 		    temp = convert_modes (GET_MODE (target),
3742 					  TYPE_MODE (ops.type),
3743 					  temp, unsignedp);
3744 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 					  GET_MODE (target), temp, unsignedp);
3746 		  }
3747 
3748 		convert_move (SUBREG_REG (target), temp, unsignedp);
3749 	      }
3750 	    else if (nontemporal && emit_storent_insn (target, temp))
3751 	      ;
3752 	    else
3753 	      {
3754 		temp = force_operand (temp, target);
3755 		if (temp != target)
3756 		  emit_move_insn (target, temp);
3757 	      }
3758 	  }
3759       }
3760       break;
3761 
3762     default:
3763       gcc_unreachable ();
3764     }
3765 }
3766 
3767 /* Expand one gimple statement STMT and return the last RTL instruction
3768    before any of the newly generated ones.
3769 
3770    In addition to generating the necessary RTL instructions this also
3771    sets REG_EH_REGION notes if necessary and sets the current source
3772    location for diagnostics.  */
3773 
3774 static rtx_insn *
3775 expand_gimple_stmt (gimple *stmt)
3776 {
3777   location_t saved_location = input_location;
3778   rtx_insn *last = get_last_insn ();
3779   int lp_nr;
3780 
3781   gcc_assert (cfun);
3782 
3783   /* We need to save and restore the current source location so that errors
3784      discovered during expansion are emitted with the right location.  But
3785      it would be better if the diagnostic routines used the source location
3786      embedded in the tree nodes rather than globals.  */
3787   if (gimple_has_location (stmt))
3788     input_location = gimple_location (stmt);
3789 
3790   expand_gimple_stmt_1 (stmt);
3791 
3792   /* Free any temporaries used to evaluate this statement.  */
3793   free_temp_slots ();
3794 
3795   input_location = saved_location;
3796 
3797   /* Mark all insns that may trap.  */
3798   lp_nr = lookup_stmt_eh_lp (stmt);
3799   if (lp_nr)
3800     {
3801       rtx_insn *insn;
3802       for (insn = next_real_insn (last); insn;
3803 	   insn = next_real_insn (insn))
3804 	{
3805 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3806 	      /* If we want exceptions for non-call insns, any
3807 		 may_trap_p instruction may throw.  */
3808 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3809 	      && GET_CODE (PATTERN (insn)) != USE
3810 	      && insn_could_throw_p (insn))
3811 	    make_reg_eh_region_note (insn, 0, lp_nr);
3812 	}
3813     }
3814 
3815   return last;
3816 }
3817 
3818 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3819    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3820    generated a tail call (something that might be denied by the ABI
3821    rules governing the call; see calls.c).
3822 
3823    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3824    can still reach the rest of BB.  The case here is __builtin_sqrt,
3825    where the NaN result goes through the external function (with a
3826    tailcall) and the normal result happens via a sqrt instruction.  */
3827 
3828 static basic_block
3829 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3830 {
3831   rtx_insn *last2, *last;
3832   edge e;
3833   edge_iterator ei;
3834   profile_probability probability;
3835 
3836   last2 = last = expand_gimple_stmt (stmt);
3837 
3838   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3839     if (CALL_P (last) && SIBLING_CALL_P (last))
3840       goto found;
3841 
3842   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3843 
3844   *can_fallthru = true;
3845   return NULL;
3846 
3847  found:
3848   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3849      Any instructions emitted here are about to be deleted.  */
3850   do_pending_stack_adjust ();
3851 
3852   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3853   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3854      EH or abnormal edges, we shouldn't have created a tail call in
3855      the first place.  So it seems to me we should just be removing
3856      all edges here, or redirecting the existing fallthru edge to
3857      the exit block.  */
3858 
3859   probability = profile_probability::never ();
3860 
3861   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3862     {
3863       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3864 	{
3865 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3866 	    e->dest->count -= e->count ();
3867 	  probability += e->probability;
3868 	  remove_edge (e);
3869 	}
3870       else
3871 	ei_next (&ei);
3872     }
3873 
3874   /* This is somewhat ugly: the call_expr expander often emits instructions
3875      after the sibcall (to perform the function return).  These confuse the
3876      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3877   last = NEXT_INSN (last);
3878   gcc_assert (BARRIER_P (last));
3879 
3880   *can_fallthru = false;
3881   while (NEXT_INSN (last))
3882     {
3883       /* For instance, a sqrt builtin expander expands an if with a
3884 	 sibcall in the then arm and a label for the else arm.  */
3885       if (LABEL_P (NEXT_INSN (last)))
3886 	{
3887 	  *can_fallthru = true;
3888 	  break;
3889 	}
3890       delete_insn (NEXT_INSN (last));
3891     }
3892 
3893   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3894 		 | EDGE_SIBCALL);
3895   e->probability = probability;
3896   BB_END (bb) = last;
3897   update_bb_for_insn (bb);
3898 
3899   if (NEXT_INSN (last))
3900     {
3901       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3902 
3903       last = BB_END (bb);
3904       if (BARRIER_P (last))
3905 	BB_END (bb) = PREV_INSN (last);
3906     }
3907 
3908   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3909 
3910   return bb;
3911 }
3912 
3913 /* Return the difference between the floor and the truncated result of
3914    a signed division by OP1 with remainder MOD.  */
3915 static rtx
3916 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3917 {
3918   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
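  /* Purely illustrative worked example: for -7 / 2, truncation gives -3
     with remainder MOD = -1; MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the
     adjustment is -1 and the floor result is -3 + -1 = -4.  */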
3919   return gen_rtx_IF_THEN_ELSE
3920     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3921      gen_rtx_IF_THEN_ELSE
3922      (mode, gen_rtx_LT (BImode,
3923 			gen_rtx_DIV (mode, op1, mod),
3924 			const0_rtx),
3925       constm1_rtx, const0_rtx),
3926      const0_rtx);
3927 }
3928 
3929 /* Return the difference between the ceil and the truncated result of
3930    a signed division by OP1 with remainder MOD.  */
3931 static rtx
3932 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3933 {
3934   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
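  /* Purely illustrative worked example: for 7 / 2, truncation gives 3
     with remainder MOD = 1; MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the
     adjustment is +1 and the ceiling result is 3 + 1 = 4.  */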
3935   return gen_rtx_IF_THEN_ELSE
3936     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3937      gen_rtx_IF_THEN_ELSE
3938      (mode, gen_rtx_GT (BImode,
3939 			gen_rtx_DIV (mode, op1, mod),
3940 			const0_rtx),
3941       const1_rtx, const0_rtx),
3942      const0_rtx);
3943 }
3944 
3945 /* Return the difference between the ceil and the truncated result of
3946    an unsigned division by OP1 with remainder MOD.  */
3947 static rtx
3948 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3949 {
3950   /* (mod != 0 ? 1 : 0) */
3951   return gen_rtx_IF_THEN_ELSE
3952     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3953      const1_rtx, const0_rtx);
3954 }
3955 
3956 /* Return the difference between the rounded and the truncated result
3957    of a signed division by OP1 with remainder MOD.  Halfway cases are
3958    rounded away from zero, rather than to the nearest even number.  */
3959 static rtx
3960 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3961 {
3962   /* (abs (mod) >= abs (op1) - abs (mod)
3963       ? (op1 / mod > 0 ? 1 : -1)
3964       : 0) */
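  /* Purely illustrative worked example: for 5 / 2, truncation gives 2
     with remainder MOD = 1; abs (MOD) >= abs (OP1) - abs (MOD) (1 >= 1)
     and OP1 / MOD > 0, so the adjustment is +1 and 5 / 2 rounds to 3,
     i.e. the halfway case goes away from zero (round-to-even would have
     chosen 2).  */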
3965   return gen_rtx_IF_THEN_ELSE
3966     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3967 		       gen_rtx_MINUS (mode,
3968 				      gen_rtx_ABS (mode, op1),
3969 				      gen_rtx_ABS (mode, mod))),
3970      gen_rtx_IF_THEN_ELSE
3971      (mode, gen_rtx_GT (BImode,
3972 			gen_rtx_DIV (mode, op1, mod),
3973 			const0_rtx),
3974       const1_rtx, constm1_rtx),
3975      const0_rtx);
3976 }
3977 
3978 /* Return the difference between the rounded and the truncated result
3979    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3980    are rounded away from zero, rather than to the nearest even
3981    number.  */
3982 static rtx
3983 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3984 {
3985   /* (mod >= op1 - mod ? 1 : 0) */
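  /* Purely illustrative worked example: for 8 / 3, MOD = 2 and
     OP1 - MOD = 1, so 2 >= 1 and the adjustment is +1, rounding 8 / 3
     up to 3; for 7 / 3, MOD = 1 < 2, so there is no adjustment and the
     result stays 2.  */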
3986   return gen_rtx_IF_THEN_ELSE
3987     (mode, gen_rtx_GE (BImode, mod,
3988 		       gen_rtx_MINUS (mode, op1, mod)),
3989      const1_rtx, const0_rtx);
3990 }
3991 
3992 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3993    any rtl.  */
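/* Roughly, as an illustrative summary of the code below: when MODE is
   wider than the mode of X, the conversion is expressed without emitting
   insns, e.g. X is wrapped in a ZERO_EXTEND or SIGN_EXTEND rtx, or, on
   ptr_extend targets, SYMBOL_REFs and LABEL_REFs are rewritten to the
   wider mode in place; if no side-effect-free form can be built, NULL is
   returned.  */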
3994 
3995 static rtx
3996 convert_debug_memory_address (scalar_int_mode mode, rtx x,
3997 			      addr_space_t as)
3998 {
3999 #ifndef POINTERS_EXTEND_UNSIGNED
4000   gcc_assert (mode == Pmode
4001 	      || mode == targetm.addr_space.address_mode (as));
4002   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4003 #else
4004   rtx temp;
4005 
4006   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4007 
4008   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4009     return x;
4010 
4011   /* X must have some form of address mode already.  */
4012   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4013   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4014     x = lowpart_subreg (mode, x, xmode);
4015   else if (POINTERS_EXTEND_UNSIGNED > 0)
4016     x = gen_rtx_ZERO_EXTEND (mode, x);
4017   else if (!POINTERS_EXTEND_UNSIGNED)
4018     x = gen_rtx_SIGN_EXTEND (mode, x);
4019   else
4020     {
4021       switch (GET_CODE (x))
4022 	{
4023 	case SUBREG:
4024 	  if ((SUBREG_PROMOTED_VAR_P (x)
4025 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4026 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4027 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4028 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4029 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4030 	      && GET_MODE (SUBREG_REG (x)) == mode)
4031 	    return SUBREG_REG (x);
4032 	  break;
4033 	case LABEL_REF:
4034 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4035 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4036 	  return temp;
4037 	case SYMBOL_REF:
4038 	  temp = shallow_copy_rtx (x);
4039 	  PUT_MODE (temp, mode);
4040 	  return temp;
4041 	case CONST:
4042 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4043 	  if (temp)
4044 	    temp = gen_rtx_CONST (mode, temp);
4045 	  return temp;
4046 	case PLUS:
4047 	case MINUS:
4048 	  if (CONST_INT_P (XEXP (x, 1)))
4049 	    {
4050 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4051 	      if (temp)
4052 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4053 	    }
4054 	  break;
4055 	default:
4056 	  break;
4057 	}
4058       /* Don't know how to express ptr_extend as an operation in debug info.  */
4059       return NULL;
4060     }
4061 #endif /* POINTERS_EXTEND_UNSIGNED */
4062 
4063   return x;
4064 }
4065 
4066 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4067    by avoid_deep_ter_for_debug.  */
4068 
4069 static hash_map<tree, tree> *deep_ter_debug_map;
4070 
4071 /* Split too deep TER chains for debug stmts using debug temporaries.  */
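/* Purely illustrative sketch: if TER would fold a chain of single-use
   definitions such as
     a_1 = x + y;  b_2 = a_1 * z;  c_3 = b_2 - w;  ...
   more than six levels deep into a single debug expression, a
   DEBUG_EXPR_DECL is instead bound to the deep SSA name right after its
   defining statement, and later debug expansion refers to that temporary
   rather than re-expanding the whole chain.  */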
4072 
4073 static void
4074 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4075 {
4076   use_operand_p use_p;
4077   ssa_op_iter iter;
4078   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4079     {
4080       tree use = USE_FROM_PTR (use_p);
4081       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4082 	continue;
4083       gimple *g = get_gimple_for_ssa_name (use);
4084       if (g == NULL)
4085 	continue;
4086       if (depth > 6 && !stmt_ends_bb_p (g))
4087 	{
4088 	  if (deep_ter_debug_map == NULL)
4089 	    deep_ter_debug_map = new hash_map<tree, tree>;
4090 
4091 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4092 	  if (vexpr != NULL)
4093 	    continue;
4094 	  vexpr = make_node (DEBUG_EXPR_DECL);
4095 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4096 	  DECL_ARTIFICIAL (vexpr) = 1;
4097 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4098 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4099 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4100 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4101 	  avoid_deep_ter_for_debug (def_temp, 0);
4102 	}
4103       else
4104 	avoid_deep_ter_for_debug (g, depth + 1);
4105     }
4106 }
4107 
4108 /* Return an RTX equivalent to the value of the parameter DECL.  */
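/* Roughly: for a parameter that arrives in a hard register (or in memory
   addressed by one), the result is an ENTRY_VALUE rtx describing the
   value that location held on entry to the function; for a stack-passed
   parameter whose slot is addressed off virtual_incoming_args_rtx, a copy
   of the incoming MEM is returned; otherwise NULL_RTX.  */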
4109 
4110 static rtx
4111 expand_debug_parm_decl (tree decl)
4112 {
4113   rtx incoming = DECL_INCOMING_RTL (decl);
4114 
4115   if (incoming
4116       && GET_MODE (incoming) != BLKmode
4117       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4118 	  || (MEM_P (incoming)
4119 	      && REG_P (XEXP (incoming, 0))
4120 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4121     {
4122       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4123 
4124 #ifdef HAVE_window_save
4125       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4126 	 If the target machine has an explicit window save instruction, the
4127 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4128       if (REG_P (incoming)
4129 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4130 	incoming
4131 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4132 				OUTGOING_REGNO (REGNO (incoming)), 0);
4133       else if (MEM_P (incoming))
4134 	{
4135 	  rtx reg = XEXP (incoming, 0);
4136 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4137 	    {
4138 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4139 	      incoming = replace_equiv_address_nv (incoming, reg);
4140 	    }
4141 	  else
4142 	    incoming = copy_rtx (incoming);
4143 	}
4144 #endif
4145 
4146       ENTRY_VALUE_EXP (rtl) = incoming;
4147       return rtl;
4148     }
4149 
4150   if (incoming
4151       && GET_MODE (incoming) != BLKmode
4152       && !TREE_ADDRESSABLE (decl)
4153       && MEM_P (incoming)
4154       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4155 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4156 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4157 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4158     return copy_rtx (incoming);
4159 
4160   return NULL_RTX;
4161 }
4162 
4163 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4164 
4165 static rtx
4166 expand_debug_expr (tree exp)
4167 {
4168   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4169   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4170   machine_mode inner_mode = VOIDmode;
4171   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4172   addr_space_t as;
4173   scalar_int_mode op0_mode, op1_mode, addr_mode;
4174 
4175   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4176     {
4177     case tcc_expression:
4178       switch (TREE_CODE (exp))
4179 	{
4180 	case COND_EXPR:
4181 	case DOT_PROD_EXPR:
4182 	case SAD_EXPR:
4183 	case WIDEN_MULT_PLUS_EXPR:
4184 	case WIDEN_MULT_MINUS_EXPR:
4185 	case FMA_EXPR:
4186 	  goto ternary;
4187 
4188 	case TRUTH_ANDIF_EXPR:
4189 	case TRUTH_ORIF_EXPR:
4190 	case TRUTH_AND_EXPR:
4191 	case TRUTH_OR_EXPR:
4192 	case TRUTH_XOR_EXPR:
4193 	  goto binary;
4194 
4195 	case TRUTH_NOT_EXPR:
4196 	  goto unary;
4197 
4198 	default:
4199 	  break;
4200 	}
4201       break;
4202 
4203     ternary:
4204       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4205       if (!op2)
4206 	return NULL_RTX;
4207       /* Fall through.  */
4208 
4209     binary:
4210     case tcc_binary:
4211       if (mode == BLKmode)
4212 	return NULL_RTX;
4213       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4214       if (!op1)
4215 	return NULL_RTX;
4216       switch (TREE_CODE (exp))
4217 	{
4218 	case LSHIFT_EXPR:
4219 	case RSHIFT_EXPR:
4220 	case LROTATE_EXPR:
4221 	case RROTATE_EXPR:
4222 	case WIDEN_LSHIFT_EXPR:
4223 	  /* Ensure second operand isn't wider than the first one.  */
4224 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4225 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4226 	      && (GET_MODE_UNIT_PRECISION (mode)
4227 		  < GET_MODE_PRECISION (op1_mode)))
4228 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4229 	  break;
4230 	default:
4231 	  break;
4232 	}
4233       /* Fall through.  */
4234 
4235     unary:
4236     case tcc_unary:
4237       if (mode == BLKmode)
4238 	return NULL_RTX;
4239       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4240       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4241       if (!op0)
4242 	return NULL_RTX;
4243       break;
4244 
4245     case tcc_comparison:
4246       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4247       goto binary;
4248 
4249     case tcc_type:
4250     case tcc_statement:
4251       gcc_unreachable ();
4252 
4253     case tcc_constant:
4254     case tcc_exceptional:
4255     case tcc_declaration:
4256     case tcc_reference:
4257     case tcc_vl_exp:
4258       break;
4259     }
4260 
4261   switch (TREE_CODE (exp))
4262     {
4263     case STRING_CST:
4264       if (!lookup_constant_def (exp))
4265 	{
4266 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4267 	      != (size_t) TREE_STRING_LENGTH (exp))
4268 	    return NULL_RTX;
4269 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4270 	  op0 = gen_rtx_MEM (BLKmode, op0);
4271 	  set_mem_attributes (op0, exp, 0);
4272 	  return op0;
4273 	}
4274       /* Fall through.  */
4275 
4276     case INTEGER_CST:
4277     case REAL_CST:
4278     case FIXED_CST:
4279       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4280       return op0;
4281 
4282     case POLY_INT_CST:
4283       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4284 
4285     case COMPLEX_CST:
4286       gcc_assert (COMPLEX_MODE_P (mode));
4287       op0 = expand_debug_expr (TREE_REALPART (exp));
4288       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4289       return gen_rtx_CONCAT (mode, op0, op1);
4290 
4291     case DEBUG_EXPR_DECL:
4292       op0 = DECL_RTL_IF_SET (exp);
4293 
4294       if (op0)
4295 	return op0;
4296 
4297       op0 = gen_rtx_DEBUG_EXPR (mode);
4298       DEBUG_EXPR_TREE_DECL (op0) = exp;
4299       SET_DECL_RTL (exp, op0);
4300 
4301       return op0;
4302 
4303     case VAR_DECL:
4304     case PARM_DECL:
4305     case FUNCTION_DECL:
4306     case LABEL_DECL:
4307     case CONST_DECL:
4308     case RESULT_DECL:
4309       op0 = DECL_RTL_IF_SET (exp);
4310 
4311       /* This decl was probably optimized away.  */
4312       if (!op0)
4313 	{
4314 	  if (!VAR_P (exp)
4315 	      || DECL_EXTERNAL (exp)
4316 	      || !TREE_STATIC (exp)
4317 	      || !DECL_NAME (exp)
4318 	      || DECL_HARD_REGISTER (exp)
4319 	      || DECL_IN_CONSTANT_POOL (exp)
4320 	      || mode == VOIDmode)
4321 	    return NULL;
4322 
4323 	  op0 = make_decl_rtl_for_debug (exp);
4324 	  if (!MEM_P (op0)
4325 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4326 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4327 	    return NULL;
4328 	}
4329       else
4330 	op0 = copy_rtx (op0);
4331 
4332       if (GET_MODE (op0) == BLKmode
4333 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4334 	     below would ICE.  While it is likely a FE bug,
4335 	     try to be robust here.  See PR43166.  */
4336 	  || mode == BLKmode
4337 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4338 	{
4339 	  gcc_assert (MEM_P (op0));
4340 	  op0 = adjust_address_nv (op0, mode, 0);
4341 	  return op0;
4342 	}
4343 
4344       /* Fall through.  */
4345 
4346     adjust_mode:
4347     case PAREN_EXPR:
4348     CASE_CONVERT:
4349       {
4350 	inner_mode = GET_MODE (op0);
4351 
4352 	if (mode == inner_mode)
4353 	  return op0;
4354 
4355 	if (inner_mode == VOIDmode)
4356 	  {
4357 	    if (TREE_CODE (exp) == SSA_NAME)
4358 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4359 	    else
4360 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4361 	    if (mode == inner_mode)
4362 	      return op0;
4363 	  }
4364 
4365 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4366 	  {
4367 	    if (GET_MODE_UNIT_BITSIZE (mode)
4368 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4369 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4370 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4371 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4372 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4373 	    else
4374 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4375 	  }
4376 	else if (FLOAT_MODE_P (mode))
4377 	  {
4378 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4379 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4380 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4381 	    else
4382 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4383 	  }
4384 	else if (FLOAT_MODE_P (inner_mode))
4385 	  {
4386 	    if (unsignedp)
4387 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4388 	    else
4389 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4390 	  }
4391 	else if (GET_MODE_UNIT_PRECISION (mode)
4392 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4393 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4394 	else if (GET_MODE_UNIT_PRECISION (mode)
4395 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4396 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4397 	else if (UNARY_CLASS_P (exp)
4398 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4399 		 : unsignedp)
4400 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4401 	else
4402 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4403 
4404 	return op0;
4405       }
4406 
4407     case MEM_REF:
4408       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4409 	{
4410 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4411 				     TREE_OPERAND (exp, 0),
4412 				     TREE_OPERAND (exp, 1));
4413 	  if (newexp)
4414 	    return expand_debug_expr (newexp);
4415 	}
4416       /* FALLTHROUGH */
4417     case INDIRECT_REF:
4418       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4419       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4420       if (!op0)
4421 	return NULL;
4422 
4423       if (TREE_CODE (exp) == MEM_REF)
4424 	{
4425 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4426 	      || (GET_CODE (op0) == PLUS
4427 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4428 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4429 	       Instead just use get_inner_reference.  */
4430 	    goto component_ref;
4431 
4432 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4433 	  if (!op1 || !CONST_INT_P (op1))
4434 	    return NULL;
4435 
4436 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4437 	}
4438 
4439       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4440 
4441       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4442 					  op0, as);
4443       if (op0 == NULL_RTX)
4444 	return NULL;
4445 
4446       op0 = gen_rtx_MEM (mode, op0);
4447       set_mem_attributes (op0, exp, 0);
4448       if (TREE_CODE (exp) == MEM_REF
4449 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4450 	set_mem_expr (op0, NULL_TREE);
4451       set_mem_addr_space (op0, as);
4452 
4453       return op0;
4454 
4455     case TARGET_MEM_REF:
4456       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4457 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4458 	return NULL;
4459 
4460       op0 = expand_debug_expr
4461 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4462       if (!op0)
4463 	return NULL;
4464 
4465       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4466       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4467 					  op0, as);
4468       if (op0 == NULL_RTX)
4469 	return NULL;
4470 
4471       op0 = gen_rtx_MEM (mode, op0);
4472 
4473       set_mem_attributes (op0, exp, 0);
4474       set_mem_addr_space (op0, as);
4475 
4476       return op0;
4477 
4478     component_ref:
4479     case ARRAY_REF:
4480     case ARRAY_RANGE_REF:
4481     case COMPONENT_REF:
4482     case BIT_FIELD_REF:
4483     case REALPART_EXPR:
4484     case IMAGPART_EXPR:
4485     case VIEW_CONVERT_EXPR:
4486       {
4487 	machine_mode mode1;
4488 	poly_int64 bitsize, bitpos;
4489 	tree offset;
4490 	int reversep, volatilep = 0;
4491 	tree tem
4492 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4493 				 &unsignedp, &reversep, &volatilep);
4494 	rtx orig_op0;
4495 
4496 	if (known_eq (bitsize, 0))
4497 	  return NULL;
4498 
4499 	orig_op0 = op0 = expand_debug_expr (tem);
4500 
4501 	if (!op0)
4502 	  return NULL;
4503 
4504 	if (offset)
4505 	  {
4506 	    machine_mode addrmode, offmode;
4507 
4508 	    if (!MEM_P (op0))
4509 	      return NULL;
4510 
4511 	    op0 = XEXP (op0, 0);
4512 	    addrmode = GET_MODE (op0);
4513 	    if (addrmode == VOIDmode)
4514 	      addrmode = Pmode;
4515 
4516 	    op1 = expand_debug_expr (offset);
4517 	    if (!op1)
4518 	      return NULL;
4519 
4520 	    offmode = GET_MODE (op1);
4521 	    if (offmode == VOIDmode)
4522 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4523 
4524 	    if (addrmode != offmode)
4525 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4526 
4527 	    /* Don't use offset_address here; we don't need a
4528 	       recognizable address, and we don't want to generate
4529 	       code.  */
4530 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4531 							  op0, op1));
4532 	  }
4533 
4534 	if (MEM_P (op0))
4535 	  {
4536 	    if (mode1 == VOIDmode)
4537 	      {
4538 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4539 		  return NULL;
4540 		/* Bitfield.  */
4541 		mode1 = smallest_int_mode_for_size (bitsize);
4542 	      }
4543 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4544 	    if (maybe_ne (bytepos, 0))
4545 	      {
4546 		op0 = adjust_address_nv (op0, mode1, bytepos);
4547 		bitpos = num_trailing_bits (bitpos);
4548 	      }
4549 	    else if (known_eq (bitpos, 0)
4550 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4551 	      op0 = adjust_address_nv (op0, mode, 0);
4552 	    else if (GET_MODE (op0) != mode1)
4553 	      op0 = adjust_address_nv (op0, mode1, 0);
4554 	    else
4555 	      op0 = copy_rtx (op0);
4556 	    if (op0 == orig_op0)
4557 	      op0 = shallow_copy_rtx (op0);
4558 	    set_mem_attributes (op0, exp, 0);
4559 	  }
4560 
4561 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4562 	  return op0;
4563 
4564 	if (maybe_lt (bitpos, 0))
4565           return NULL;
4566 
4567 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4568 	  return NULL;
4569 
4570 	poly_int64 bytepos;
4571 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4572 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4573 	  {
4574 	    machine_mode opmode = GET_MODE (op0);
4575 
4576 	    if (opmode == VOIDmode)
4577 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4578 
4579 	    /* This condition may hold if we're expanding the address
4580 	       right past the end of an array that turned out not to
4581 	       be addressable (i.e., the address was only computed in
4582 	       debug stmts).  The gen_subreg below would rightfully
4583 	       crash, and the address doesn't really exist, so just
4584 	       drop it.  */
4585 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4586 	      return NULL;
4587 
4588 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4589 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4590 	  }
4591 
4592 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4593 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4594 				     ? SIGN_EXTRACT
4595 				     : ZERO_EXTRACT, mode,
4596 				     GET_MODE (op0) != VOIDmode
4597 				     ? GET_MODE (op0)
4598 				     : TYPE_MODE (TREE_TYPE (tem)),
4599 				     op0, gen_int_mode (bitsize, word_mode),
4600 				     gen_int_mode (bitpos, word_mode));
4601       }
4602 
4603     case ABS_EXPR:
4604       return simplify_gen_unary (ABS, mode, op0, mode);
4605 
4606     case NEGATE_EXPR:
4607       return simplify_gen_unary (NEG, mode, op0, mode);
4608 
4609     case BIT_NOT_EXPR:
4610       return simplify_gen_unary (NOT, mode, op0, mode);
4611 
4612     case FLOAT_EXPR:
4613       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4614 									 0)))
4615 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4616 				 inner_mode);
4617 
4618     case FIX_TRUNC_EXPR:
4619       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4620 				 inner_mode);
4621 
4622     case POINTER_PLUS_EXPR:
4623       /* For the rare target where pointers are not the same size as
4624 	 size_t, we need to check for mis-matched modes and correct
4625 	 the addend.  */
4626       if (op0 && op1
4627 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4628 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4629 	  && op0_mode != op1_mode)
4630 	{
4631 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4632 	      /* If OP0 is a partial mode, then we must truncate, even
4633 		 if it has the same bitsize as OP1, since GCC's
4634 		 representation of partial modes is opaque.  */
4635 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4636 		  && (GET_MODE_BITSIZE (op0_mode)
4637 		      == GET_MODE_BITSIZE (op1_mode))))
4638 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4639 	  else
4640 	    /* We always sign-extend, regardless of the signedness of
4641 	       the operand, because the operand is always unsigned
4642 	       here even if the original C expression is signed.  */
4643 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4644 	}
4645       /* Fall through.  */
4646     case PLUS_EXPR:
4647       return simplify_gen_binary (PLUS, mode, op0, op1);
4648 
4649     case MINUS_EXPR:
4650     case POINTER_DIFF_EXPR:
4651       return simplify_gen_binary (MINUS, mode, op0, op1);
4652 
4653     case MULT_EXPR:
4654       return simplify_gen_binary (MULT, mode, op0, op1);
4655 
4656     case RDIV_EXPR:
4657     case TRUNC_DIV_EXPR:
4658     case EXACT_DIV_EXPR:
4659       if (unsignedp)
4660 	return simplify_gen_binary (UDIV, mode, op0, op1);
4661       else
4662 	return simplify_gen_binary (DIV, mode, op0, op1);
4663 
4664     case TRUNC_MOD_EXPR:
4665       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4666 
4667     case FLOOR_DIV_EXPR:
4668       if (unsignedp)
4669 	return simplify_gen_binary (UDIV, mode, op0, op1);
4670       else
4671 	{
4672 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4673 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4674 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4675 	  return simplify_gen_binary (PLUS, mode, div, adj);
4676 	}
4677 
4678     case FLOOR_MOD_EXPR:
4679       if (unsignedp)
4680 	return simplify_gen_binary (UMOD, mode, op0, op1);
4681       else
4682 	{
4683 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4684 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4685 	  adj = simplify_gen_unary (NEG, mode,
4686 				    simplify_gen_binary (MULT, mode, adj, op1),
4687 				    mode);
4688 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4689 	}
4690 
4691     case CEIL_DIV_EXPR:
4692       if (unsignedp)
4693 	{
4694 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4695 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4696 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4697 	  return simplify_gen_binary (PLUS, mode, div, adj);
4698 	}
4699       else
4700 	{
4701 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4702 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4703 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4704 	  return simplify_gen_binary (PLUS, mode, div, adj);
4705 	}
4706 
4707     case CEIL_MOD_EXPR:
4708       if (unsignedp)
4709 	{
4710 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4711 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4712 	  adj = simplify_gen_unary (NEG, mode,
4713 				    simplify_gen_binary (MULT, mode, adj, op1),
4714 				    mode);
4715 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4716 	}
4717       else
4718 	{
4719 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4720 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4721 	  adj = simplify_gen_unary (NEG, mode,
4722 				    simplify_gen_binary (MULT, mode, adj, op1),
4723 				    mode);
4724 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4725 	}
4726 
4727     case ROUND_DIV_EXPR:
4728       if (unsignedp)
4729 	{
4730 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4731 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4732 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4733 	  return simplify_gen_binary (PLUS, mode, div, adj);
4734 	}
4735       else
4736 	{
4737 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4738 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4739 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4740 	  return simplify_gen_binary (PLUS, mode, div, adj);
4741 	}
4742 
4743     case ROUND_MOD_EXPR:
4744       if (unsignedp)
4745 	{
4746 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4747 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4748 	  adj = simplify_gen_unary (NEG, mode,
4749 				    simplify_gen_binary (MULT, mode, adj, op1),
4750 				    mode);
4751 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4752 	}
4753       else
4754 	{
4755 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4756 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4757 	  adj = simplify_gen_unary (NEG, mode,
4758 				    simplify_gen_binary (MULT, mode, adj, op1),
4759 				    mode);
4760 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4761 	}
4762 
4763     case LSHIFT_EXPR:
4764       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4765 
4766     case RSHIFT_EXPR:
4767       if (unsignedp)
4768 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4769       else
4770 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4771 
4772     case LROTATE_EXPR:
4773       return simplify_gen_binary (ROTATE, mode, op0, op1);
4774 
4775     case RROTATE_EXPR:
4776       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4777 
4778     case MIN_EXPR:
4779       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4780 
4781     case MAX_EXPR:
4782       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4783 
4784     case BIT_AND_EXPR:
4785     case TRUTH_AND_EXPR:
4786       return simplify_gen_binary (AND, mode, op0, op1);
4787 
4788     case BIT_IOR_EXPR:
4789     case TRUTH_OR_EXPR:
4790       return simplify_gen_binary (IOR, mode, op0, op1);
4791 
4792     case BIT_XOR_EXPR:
4793     case TRUTH_XOR_EXPR:
4794       return simplify_gen_binary (XOR, mode, op0, op1);
4795 
4796     case TRUTH_ANDIF_EXPR:
4797       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4798 
4799     case TRUTH_ORIF_EXPR:
4800       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4801 
4802     case TRUTH_NOT_EXPR:
4803       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4804 
4805     case LT_EXPR:
4806       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4807 				      op0, op1);
4808 
4809     case LE_EXPR:
4810       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4811 				      op0, op1);
4812 
4813     case GT_EXPR:
4814       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4815 				      op0, op1);
4816 
4817     case GE_EXPR:
4818       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4819 				      op0, op1);
4820 
4821     case EQ_EXPR:
4822       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4823 
4824     case NE_EXPR:
4825       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4826 
4827     case UNORDERED_EXPR:
4828       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4829 
4830     case ORDERED_EXPR:
4831       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4832 
4833     case UNLT_EXPR:
4834       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4835 
4836     case UNLE_EXPR:
4837       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4838 
4839     case UNGT_EXPR:
4840       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4841 
4842     case UNGE_EXPR:
4843       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4844 
4845     case UNEQ_EXPR:
4846       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4847 
4848     case LTGT_EXPR:
4849       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4850 
4851     case COND_EXPR:
4852       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4853 
4854     case COMPLEX_EXPR:
4855       gcc_assert (COMPLEX_MODE_P (mode));
4856       if (GET_MODE (op0) == VOIDmode)
4857 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4858       if (GET_MODE (op1) == VOIDmode)
4859 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4860       return gen_rtx_CONCAT (mode, op0, op1);
4861 
4862     case CONJ_EXPR:
4863       if (GET_CODE (op0) == CONCAT)
4864 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4865 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4866 						   XEXP (op0, 1),
4867 						   GET_MODE_INNER (mode)));
4868       else
4869 	{
4870 	  scalar_mode imode = GET_MODE_INNER (mode);
4871 	  rtx re, im;
4872 
4873 	  if (MEM_P (op0))
4874 	    {
4875 	      re = adjust_address_nv (op0, imode, 0);
4876 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4877 	    }
4878 	  else
4879 	    {
4880 	      scalar_int_mode ifmode;
4881 	      scalar_int_mode ihmode;
4882 	      rtx halfsize;
4883 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4884 		  || !int_mode_for_mode (imode).exists (&ihmode))
4885 		return NULL;
4886 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4887 	      re = op0;
4888 	      if (mode != ifmode)
4889 		re = gen_rtx_SUBREG (ifmode, re, 0);
4890 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4891 	      if (imode != ihmode)
4892 		re = gen_rtx_SUBREG (imode, re, 0);
4893 	      im = copy_rtx (op0);
4894 	      if (mode != ifmode)
4895 		im = gen_rtx_SUBREG (ifmode, im, 0);
4896 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4897 	      if (imode != ihmode)
4898 		im = gen_rtx_SUBREG (imode, im, 0);
4899 	    }
4900 	  im = gen_rtx_NEG (imode, im);
4901 	  return gen_rtx_CONCAT (mode, re, im);
4902 	}
4903 
4904     case ADDR_EXPR:
4905       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4906       if (!op0 || !MEM_P (op0))
4907 	{
4908 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4909 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4910 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4911 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4912 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4913 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4914 
4915 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4916 	    {
4917 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4918 	      bool reverse;
4919 	      tree decl
4920 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4921 					   &bitsize, &maxsize, &reverse);
4922 	      if ((VAR_P (decl)
4923 		   || TREE_CODE (decl) == PARM_DECL
4924 		   || TREE_CODE (decl) == RESULT_DECL)
4925 		  && (!TREE_ADDRESSABLE (decl)
4926 		      || target_for_debug_bind (decl))
4927 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4928 		  && known_gt (bitsize, 0)
4929 		  && known_eq (bitsize, maxsize))
4930 		{
4931 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4932 		  return plus_constant (mode, base, byteoffset);
4933 		}
4934 	    }
4935 
4936 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4937 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4938 		 == ADDR_EXPR)
4939 	    {
4940 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4941 						     0));
4942 	      if (op0 != NULL
4943 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4944 		      || (GET_CODE (op0) == PLUS
4945 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4946 			  && CONST_INT_P (XEXP (op0, 1)))))
4947 		{
4948 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4949 							 1));
4950 		  if (!op1 || !CONST_INT_P (op1))
4951 		    return NULL;
4952 
4953 		  return plus_constant (mode, op0, INTVAL (op1));
4954 		}
4955 	    }
4956 
4957 	  return NULL;
4958 	}
4959 
4960       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4961       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4962       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4963 
4964       return op0;
4965 
4966     case VECTOR_CST:
4967       {
4968 	unsigned HOST_WIDE_INT i, nelts;
4969 
4970 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4971 	  return NULL;
4972 
4973 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4974 
4975 	for (i = 0; i < nelts; ++i)
4976 	  {
4977 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4978 	    if (!op1)
4979 	      return NULL;
4980 	    XVECEXP (op0, 0, i) = op1;
4981 	  }
4982 
4983 	return op0;
4984       }
4985 
4986     case CONSTRUCTOR:
4987       if (TREE_CLOBBER_P (exp))
4988 	return NULL;
4989       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4990 	{
4991 	  unsigned i;
4992 	  unsigned HOST_WIDE_INT nelts;
4993 	  tree val;
4994 
4995 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
4996 	    goto flag_unsupported;
4997 
4998 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4999 
5000 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5001 	    {
5002 	      op1 = expand_debug_expr (val);
5003 	      if (!op1)
5004 		return NULL;
5005 	      XVECEXP (op0, 0, i) = op1;
5006 	    }
5007 
5008 	  if (i < nelts)
5009 	    {
5010 	      op1 = expand_debug_expr
5011 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5012 
5013 	      if (!op1)
5014 		return NULL;
5015 
5016 	      for (; i < nelts; i++)
5017 		XVECEXP (op0, 0, i) = op1;
5018 	    }
5019 
5020 	  return op0;
5021 	}
5022       else
5023 	goto flag_unsupported;
5024 
5025     case CALL_EXPR:
5026       /* ??? Maybe handle some builtins?  */
5027       return NULL;
5028 
5029     case SSA_NAME:
5030       {
5031 	gimple *g = get_gimple_for_ssa_name (exp);
5032 	if (g)
5033 	  {
5034 	    tree t = NULL_TREE;
5035 	    if (deep_ter_debug_map)
5036 	      {
5037 		tree *slot = deep_ter_debug_map->get (exp);
5038 		if (slot)
5039 		  t = *slot;
5040 	      }
5041 	    if (t == NULL_TREE)
5042 	      t = gimple_assign_rhs_to_tree (g);
5043 	    op0 = expand_debug_expr (t);
5044 	    if (!op0)
5045 	      return NULL;
5046 	  }
5047 	else
5048 	  {
5049 	    /* If this is a reference to an incoming value of a
5050 	       parameter that is never used in the code, or where the
5051 	       incoming value is never used in the code, use the
5052 	       PARM_DECL's DECL_RTL if set.  */
5053 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5054 		&& SSA_NAME_VAR (exp)
5055 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5056 		&& has_zero_uses (exp))
5057 	      {
5058 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5059 		if (op0)
5060 		  goto adjust_mode;
5061 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5062 		if (op0)
5063 		  goto adjust_mode;
5064 	      }
5065 
5066 	    int part = var_to_partition (SA.map, exp);
5067 
5068 	    if (part == NO_PARTITION)
5069 	      return NULL;
5070 
5071 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5072 
5073 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5074 	  }
5075 	goto adjust_mode;
5076       }
5077 
5078     case ERROR_MARK:
5079       return NULL;
5080 
5081     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5082     case REALIGN_LOAD_EXPR:
5083     case VEC_COND_EXPR:
5084     case VEC_PACK_FIX_TRUNC_EXPR:
5085     case VEC_PACK_SAT_EXPR:
5086     case VEC_PACK_TRUNC_EXPR:
5087     case VEC_UNPACK_FLOAT_HI_EXPR:
5088     case VEC_UNPACK_FLOAT_LO_EXPR:
5089     case VEC_UNPACK_HI_EXPR:
5090     case VEC_UNPACK_LO_EXPR:
5091     case VEC_WIDEN_MULT_HI_EXPR:
5092     case VEC_WIDEN_MULT_LO_EXPR:
5093     case VEC_WIDEN_MULT_EVEN_EXPR:
5094     case VEC_WIDEN_MULT_ODD_EXPR:
5095     case VEC_WIDEN_LSHIFT_HI_EXPR:
5096     case VEC_WIDEN_LSHIFT_LO_EXPR:
5097     case VEC_PERM_EXPR:
5098     case VEC_DUPLICATE_EXPR:
5099     case VEC_SERIES_EXPR:
5100       return NULL;
5101 
5102     /* Misc codes.  */
5103     case ADDR_SPACE_CONVERT_EXPR:
5104     case FIXED_CONVERT_EXPR:
5105     case OBJ_TYPE_REF:
5106     case WITH_SIZE_EXPR:
5107     case BIT_INSERT_EXPR:
5108       return NULL;
5109 
5110     case DOT_PROD_EXPR:
5111       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5112 	  && SCALAR_INT_MODE_P (mode))
5113 	{
5114 	  op0
5115 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5116 									  0)))
5117 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5118 				  inner_mode);
5119 	  op1
5120 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5121 									  1)))
5122 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5123 				  inner_mode);
5124 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5125 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5126 	}
5127       return NULL;
5128 
5129     case WIDEN_MULT_EXPR:
5130     case WIDEN_MULT_PLUS_EXPR:
5131     case WIDEN_MULT_MINUS_EXPR:
5132       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5133 	  && SCALAR_INT_MODE_P (mode))
5134 	{
5135 	  inner_mode = GET_MODE (op0);
5136 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5137 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5138 	  else
5139 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5140 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5141 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5142 	  else
5143 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5144 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5145 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5146 	    return op0;
5147 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5148 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5149 	  else
5150 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5151 	}
5152       return NULL;
5153 
5154     case MULT_HIGHPART_EXPR:
5155       /* ??? Similar to the above.  */
5156       return NULL;
5157 
5158     case WIDEN_SUM_EXPR:
5159     case WIDEN_LSHIFT_EXPR:
5160       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5161 	  && SCALAR_INT_MODE_P (mode))
5162 	{
5163 	  op0
5164 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5165 									  0)))
5166 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5167 				  inner_mode);
5168 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5169 				      ? ASHIFT : PLUS, mode, op0, op1);
5170 	}
5171       return NULL;
5172 
5173     case FMA_EXPR:
5174       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5175 
5176     default:
5177     flag_unsupported:
5178       if (flag_checking)
5179 	{
5180 	  debug_tree (exp);
5181 	  gcc_unreachable ();
5182 	}
5183       return NULL;
5184     }
5185 }
5186 
5187 /* Return an RTX equivalent to the source bind value of the tree expression
5188    EXP.  */
5189 
5190 static rtx
5191 expand_debug_source_expr (tree exp)
5192 {
5193   rtx op0 = NULL_RTX;
5194   machine_mode mode = VOIDmode, inner_mode;
5195 
5196   switch (TREE_CODE (exp))
5197     {
5198     case PARM_DECL:
5199       {
5200 	mode = DECL_MODE (exp);
5201 	op0 = expand_debug_parm_decl (exp);
5202 	if (op0)
5203 	   break;
5204 	/* Check whether this is an argument that has been completely
5205 	   optimized out.  */
5206 	if (!DECL_RTL_SET_P (exp)
5207 	    && !DECL_INCOMING_RTL (exp)
5208 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5209 	  {
5210 	    tree aexp = DECL_ORIGIN (exp);
5211 	    if (DECL_CONTEXT (aexp)
5212 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5213 	      {
5214 		vec<tree, va_gc> **debug_args;
5215 		unsigned int ix;
5216 		tree ddecl;
5217 		debug_args = decl_debug_args_lookup (current_function_decl);
5218 		if (debug_args != NULL)
5219 		  {
5220 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5221 			 ix += 2)
5222 		      if (ddecl == aexp)
5223 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5224 		  }
5225 	      }
5226 	  }
5227 	break;
5228       }
5229     default:
5230       break;
5231     }
5232 
5233   if (op0 == NULL_RTX)
5234     return NULL_RTX;
5235 
5236   inner_mode = GET_MODE (op0);
5237   if (mode == inner_mode)
5238     return op0;
5239 
5240   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5241     {
5242       if (GET_MODE_UNIT_BITSIZE (mode)
5243 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5244 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5245       else if (GET_MODE_UNIT_BITSIZE (mode)
5246 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5247 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5248       else
5249 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5250     }
5251   else if (FLOAT_MODE_P (mode))
5252     gcc_unreachable ();
5253   else if (FLOAT_MODE_P (inner_mode))
5254     {
5255       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5256 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5257       else
5258 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5259     }
5260   else if (GET_MODE_UNIT_PRECISION (mode)
5261 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5262     op0 = lowpart_subreg (mode, op0, inner_mode);
5263   else if (GET_MODE_UNIT_PRECISION (mode)
5264 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5265     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5266   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5267     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5268   else
5269     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5270 
5271   return op0;
5272 }
5273 
5274 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5275    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5276    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
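/* For illustration only -- the operands X, Y, A, B, C and D below are
   hypothetical registers, not taken from any particular testcase.  In a
   location such as
     (plus (mult (mult (mult (mult X Y) A) B) C) D)
   the innermost (mult X Y) sits at nesting depth 4, so it would be split
   out into a fresh DEBUG_EXPR (shown as D#N in dumps), bound by its own
   DEBUG_INSN emitted just before INSN, and the original location would
   then refer to that D#N instead.  */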
5277 
5278 static void
5279 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5280 {
5281   rtx exp = *exp_p;
5282 
5283   if (exp == NULL_RTX)
5284     return;
5285 
5286   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5287     return;
5288 
5289   if (depth == 4)
5290     {
5291       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5292       rtx dval = make_debug_expr_from_rtl (exp);
5293 
5294       /* Emit a debug bind insn before INSN.  */
5295       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5296 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5297 				       VAR_INIT_STATUS_INITIALIZED);
5298 
5299       emit_debug_insn_before (bind, insn);
5300       *exp_p = dval;
5301       return;
5302     }
5303 
5304   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5305   int i, j;
5306   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5307     switch (*format_ptr++)
5308       {
5309       case 'e':
5310 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5311 	break;
5312 
5313       case 'E':
5314       case 'V':
5315 	for (j = 0; j < XVECLEN (exp, i); j++)
5316 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5317 	break;
5318 
5319       default:
5320 	break;
5321       }
5322 }
5323 
5324 /* Expand the _LOCs in debug insns.  We run this after expanding all
5325    regular insns, so that any variables referenced in the function
5326    will have their DECL_RTLs set.  */
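/* A rough sketch of the effect (variable name, mode and pseudo number are
   hypothetical):  a bind recorded during expansion as
     #DEBUG x => a_1 + 1
   gets its tree location replaced here by an rtx along the lines of
     (plus:SI (reg:SI 60 [ a ]) (const_int 1))
   or by gen_rtx_UNKNOWN_VAR_LOC () -- (clobber (const_int 0)) -- when the
   value cannot be represented in RTL.  */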
5327 
5328 static void
5329 expand_debug_locations (void)
5330 {
5331   rtx_insn *insn;
5332   rtx_insn *last = get_last_insn ();
5333   int save_strict_alias = flag_strict_aliasing;
5334 
5335   /* New alias sets created while setting up memory attributes cause
5336      -fcompare-debug failures, even though they don't bring about any
5337      codegen changes.  */
5338   flag_strict_aliasing = 0;
5339 
5340   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5341     if (DEBUG_BIND_INSN_P (insn))
5342       {
5343 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5344 	rtx val;
5345 	rtx_insn *prev_insn, *insn2;
5346 	machine_mode mode;
5347 
5348 	if (value == NULL_TREE)
5349 	  val = NULL_RTX;
5350 	else
5351 	  {
5352 	    if (INSN_VAR_LOCATION_STATUS (insn)
5353 		== VAR_INIT_STATUS_UNINITIALIZED)
5354 	      val = expand_debug_source_expr (value);
5355 	    /* The avoid_deep_ter_for_debug function inserts
5356 	       debug bind stmts after SSA_NAME definition, with the
5357 	       SSA_NAME as the whole bind location.  Disable temporarily
5358 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5359 	       being defined in this DEBUG_INSN.  */
5360 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5361 	      {
5362 		tree *slot = deep_ter_debug_map->get (value);
5363 		if (slot)
5364 		  {
5365 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5366 		      *slot = NULL_TREE;
5367 		    else
5368 		      slot = NULL;
5369 		  }
5370 		val = expand_debug_expr (value);
5371 		if (slot)
5372 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5373 	      }
5374 	    else
5375 	      val = expand_debug_expr (value);
5376 	    gcc_assert (last == get_last_insn ());
5377 	  }
5378 
5379 	if (!val)
5380 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5381 	else
5382 	  {
5383 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5384 
5385 	    gcc_assert (mode == GET_MODE (val)
5386 			|| (GET_MODE (val) == VOIDmode
5387 			    && (CONST_SCALAR_INT_P (val)
5388 				|| GET_CODE (val) == CONST_FIXED
5389 				|| GET_CODE (val) == LABEL_REF)));
5390 	  }
5391 
5392 	INSN_VAR_LOCATION_LOC (insn) = val;
5393 	prev_insn = PREV_INSN (insn);
5394 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5395 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5396       }
5397 
5398   flag_strict_aliasing = save_strict_alias;
5399 }
5400 
5401 /* Swap the operands of commutative operations so that the more
5402    expensive one is expanded first.  */
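/* A schematic example (hypothetical SSA names; assume each temporary is
   used only once, so its defining statement is TERed):
     t_1 = x_0 * y_0;
     t_2 = t_1 * z_0;    <-- accumulated cost covers t_1's as well
     t_3 = w_0 + 1;
     r_4 = t_3 + t_2;
   Because t_2's defining subtree is costlier than t_3's, the operands of
   r_4 are swapped to r_4 = t_2 + t_3, so the expensive operand is expanded
   first.  */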
5403 
5404 static void
5405 reorder_operands (basic_block bb)
5406 {
5407   unsigned int *lattice;  /* Hold cost of each statement.  */
5408   unsigned int i = 0, n = 0;
5409   gimple_stmt_iterator gsi;
5410   gimple_seq stmts;
5411   gimple *stmt;
5412   bool swap;
5413   tree op0, op1;
5414   ssa_op_iter iter;
5415   use_operand_p use_p;
5416   gimple *def0, *def1;
5417 
5418   /* Compute cost of each statement using estimate_num_insns.  */
5419   stmts = bb_seq (bb);
5420   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5421     {
5422       stmt = gsi_stmt (gsi);
5423       if (!is_gimple_debug (stmt))
5424         gimple_set_uid (stmt, n++);
5425     }
5426   lattice = XNEWVEC (unsigned int, n);
5427   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5428     {
5429       unsigned cost;
5430       stmt = gsi_stmt (gsi);
5431       if (is_gimple_debug (stmt))
5432 	continue;
5433       cost = estimate_num_insns (stmt, &eni_size_weights);
5434       lattice[i] = cost;
5435       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5436 	{
5437 	  tree use = USE_FROM_PTR (use_p);
5438 	  gimple *def_stmt;
5439 	  if (TREE_CODE (use) != SSA_NAME)
5440 	    continue;
5441 	  def_stmt = get_gimple_for_ssa_name (use);
5442 	  if (!def_stmt)
5443 	    continue;
5444 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5445 	}
5446       i++;
5447       if (!is_gimple_assign (stmt)
5448 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5449 	continue;
5450       op0 = gimple_op (stmt, 1);
5451       op1 = gimple_op (stmt, 2);
5452       if (TREE_CODE (op0) != SSA_NAME
5453 	  || TREE_CODE (op1) != SSA_NAME)
5454 	continue;
5455       /* Swap operands if the second one is more expensive.  */
5456       def0 = get_gimple_for_ssa_name (op0);
5457       def1 = get_gimple_for_ssa_name (op1);
5458       if (!def1)
5459 	continue;
5460       swap = false;
5461       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5462 	swap = true;
5463       if (swap)
5464 	{
5465 	  if (dump_file && (dump_flags & TDF_DETAILS))
5466 	    {
5467 	      fprintf (dump_file, "Swap operands in stmt:\n");
5468 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5469 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5470 		       def0 ? lattice[gimple_uid (def0)] : 0,
5471 		       lattice[gimple_uid (def1)]);
5472 	    }
5473 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5474 			     gimple_assign_rhs2_ptr (stmt));
5475 	}
5476     }
5477   XDELETE (lattice);
5478 }
5479 
5480 /* Expand basic block BB from GIMPLE trees to RTL.  */
5481 
5482 static basic_block
5483 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5484 {
5485   gimple_stmt_iterator gsi;
5486   gimple_seq stmts;
5487   gimple *stmt = NULL;
5488   rtx_note *note = NULL;
5489   rtx_insn *last;
5490   edge e;
5491   edge_iterator ei;
5492 
5493   if (dump_file)
5494     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5495 	     bb->index);
5496 
5497   /* Note that since we are now transitioning from GIMPLE to RTL, we
5498      cannot use the gsi_*_bb() routines because they expect the basic
5499      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5500      access the BB sequence directly.  */
5501   if (optimize)
5502     reorder_operands (bb);
5503   stmts = bb_seq (bb);
5504   bb->il.gimple.seq = NULL;
5505   bb->il.gimple.phi_nodes = NULL;
5506   rtl_profile_for_bb (bb);
5507   init_rtl_bb_info (bb);
5508   bb->flags |= BB_RTL;
5509 
5510   /* Remove the RETURN_EXPR if we may fall through to the exit
5511      instead.  */
5512   gsi = gsi_last (stmts);
5513   if (!gsi_end_p (gsi)
5514       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5515     {
5516       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5517 
5518       gcc_assert (single_succ_p (bb));
5519       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5520 
5521       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5522 	  && !gimple_return_retval (ret_stmt))
5523 	{
5524 	  gsi_remove (&gsi, false);
5525 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5526 	}
5527     }
5528 
5529   gsi = gsi_start (stmts);
5530   if (!gsi_end_p (gsi))
5531     {
5532       stmt = gsi_stmt (gsi);
5533       if (gimple_code (stmt) != GIMPLE_LABEL)
5534 	stmt = NULL;
5535     }
5536 
5537   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5538 
5539   if (stmt || elt)
5540     {
5541       gcc_checking_assert (!note);
5542       last = get_last_insn ();
5543 
5544       if (stmt)
5545 	{
5546 	  expand_gimple_stmt (stmt);
5547 	  gsi_next (&gsi);
5548 	}
5549 
5550       if (elt)
5551 	emit_label (*elt);
5552 
5553       BB_HEAD (bb) = NEXT_INSN (last);
5554       if (NOTE_P (BB_HEAD (bb)))
5555 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5556       gcc_assert (LABEL_P (BB_HEAD (bb)));
5557       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5558 
5559       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5560     }
5561   else
5562     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5563 
5564   if (note)
5565     NOTE_BASIC_BLOCK (note) = bb;
5566 
5567   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5568     {
5569       basic_block new_bb;
5570 
5571       stmt = gsi_stmt (gsi);
5572 
5573       /* If this statement is a non-debug one, and we generate debug
5574 	 insns, then this one might be the last real use of a TERed
5575 	 SSA_NAME, but where there are still some debug uses further
5576 	 down.  Expanding the current SSA name in such further debug
5577 	 uses by their RHS might lead to wrong debug info, as coalescing
5578 	 might make the operands of such RHS be placed into the same
5579 	 pseudo as something else.  Like so:
5580 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5581 	   use(a_1);
5582 	   a_2 = ...
5583            #DEBUG ... => a_1
5584 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5585 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5586 	 the write to a_2 would actually have clobbered the place which
5587 	 formerly held a_0.
5588 
5589 	 So, instead of that, we recognize the situation, and generate
5590 	 debug temporaries at the last real use of TERed SSA names:
5591 	   a_1 = a_0 + 1;
5592            #DEBUG #D1 => a_1
5593 	   use(a_1);
5594 	   a_2 = ...
5595            #DEBUG ... => #D1
5596 	 */
5597       if (MAY_HAVE_DEBUG_BIND_INSNS
5598 	  && SA.values
5599 	  && !is_gimple_debug (stmt))
5600 	{
5601 	  ssa_op_iter iter;
5602 	  tree op;
5603 	  gimple *def;
5604 
5605 	  location_t sloc = curr_insn_location ();
5606 
5607 	  /* Look for SSA names that have their last use here (TERed
5608 	     names always have only one real use).  */
5609 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5610 	    if ((def = get_gimple_for_ssa_name (op)))
5611 	      {
5612 		imm_use_iterator imm_iter;
5613 		use_operand_p use_p;
5614 		bool have_debug_uses = false;
5615 
5616 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5617 		  {
5618 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5619 		      {
5620 			have_debug_uses = true;
5621 			break;
5622 		      }
5623 		  }
5624 
5625 		if (have_debug_uses)
5626 		  {
5627 		    /* OP is a TERed SSA name, with DEF its defining
5628 		       statement, and where OP is used in further debug
5629 		       instructions.  Generate a debug temporary, and
5630 		       replace all uses of OP in debug insns with that
5631 		       temporary.  */
5632 		    gimple *debugstmt;
5633 		    tree value = gimple_assign_rhs_to_tree (def);
5634 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5635 		    rtx val;
5636 		    machine_mode mode;
5637 
5638 		    set_curr_insn_location (gimple_location (def));
5639 
5640 		    DECL_ARTIFICIAL (vexpr) = 1;
5641 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5642 		    if (DECL_P (value))
5643 		      mode = DECL_MODE (value);
5644 		    else
5645 		      mode = TYPE_MODE (TREE_TYPE (value));
5646 		    SET_DECL_MODE (vexpr, mode);
5647 
5648 		    val = gen_rtx_VAR_LOCATION
5649 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5650 
5651 		    emit_debug_insn (val);
5652 
5653 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5654 		      {
5655 			if (!gimple_debug_bind_p (debugstmt))
5656 			  continue;
5657 
5658 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5659 			  SET_USE (use_p, vexpr);
5660 
5661 			update_stmt (debugstmt);
5662 		      }
5663 		  }
5664 	      }
5665 	  set_curr_insn_location (sloc);
5666 	}
5667 
5668       currently_expanding_gimple_stmt = stmt;
5669 
5670       /* Expand this statement, then evaluate the resulting RTL and
5671 	 fixup the CFG accordingly.  */
5672       if (gimple_code (stmt) == GIMPLE_COND)
5673 	{
5674 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5675 	  if (new_bb)
5676 	    return new_bb;
5677 	}
5678       else if (is_gimple_debug (stmt))
5679 	{
5680 	  location_t sloc = curr_insn_location ();
5681 	  gimple_stmt_iterator nsi = gsi;
5682 
5683 	  for (;;)
5684 	    {
5685 	      tree var;
5686 	      tree value = NULL_TREE;
5687 	      rtx val = NULL_RTX;
5688 	      machine_mode mode;
5689 
5690 	      if (!gimple_debug_nonbind_marker_p (stmt))
5691 		{
5692 		  if (gimple_debug_bind_p (stmt))
5693 		    {
5694 		      var = gimple_debug_bind_get_var (stmt);
5695 
5696 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5697 			  && TREE_CODE (var) != LABEL_DECL
5698 			  && !target_for_debug_bind (var))
5699 			goto delink_debug_stmt;
5700 
5701 		      if (DECL_P (var))
5702 			mode = DECL_MODE (var);
5703 		      else
5704 			mode = TYPE_MODE (TREE_TYPE (var));
5705 
5706 		      if (gimple_debug_bind_has_value_p (stmt))
5707 			value = gimple_debug_bind_get_value (stmt);
5708 
5709 		      val = gen_rtx_VAR_LOCATION
5710 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5711 		    }
5712 		  else if (gimple_debug_source_bind_p (stmt))
5713 		    {
5714 		      var = gimple_debug_source_bind_get_var (stmt);
5715 
5716 		      value = gimple_debug_source_bind_get_value (stmt);
5717 
5718 		      mode = DECL_MODE (var);
5719 
5720 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5721 						  VAR_INIT_STATUS_UNINITIALIZED);
5722 		    }
5723 		  else
5724 		    gcc_unreachable ();
5725 		}
5726 	      /* If this function was first compiled with markers
5727 		 enabled, but they're now disabled (e.g. LTO), drop
5728 		 them on the floor.  */
5729 	      else if (gimple_debug_nonbind_marker_p (stmt)
5730 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5731 		goto delink_debug_stmt;
5732 	      else if (gimple_debug_begin_stmt_p (stmt))
5733 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5734 	      else if (gimple_debug_inline_entry_p (stmt))
5735 		{
5736 		  tree block = gimple_block (stmt);
5737 
5738 		  if (block)
5739 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5740 		  else
5741 		    goto delink_debug_stmt;
5742 		}
5743 	      else
5744 		gcc_unreachable ();
5745 
5746 	      last = get_last_insn ();
5747 
5748 	      set_curr_insn_location (gimple_location (stmt));
5749 
5750 	      emit_debug_insn (val);
5751 
5752 	      if (dump_file && (dump_flags & TDF_DETAILS))
5753 		{
5754 		  /* We can't dump the insn with a TREE where an RTX
5755 		     is expected.  */
5756 		  if (GET_CODE (val) == VAR_LOCATION)
5757 		    {
5758 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5759 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5760 		    }
5761 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5762 		  if (GET_CODE (val) == VAR_LOCATION)
5763 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5764 		}
5765 
5766 	    delink_debug_stmt:
5767 	      /* In order not to generate too many debug temporaries,
5768 	         we delink all uses of debug statements we already expanded.
5769 		 Therefore debug statements between definition and real
5770 		 use of TERed SSA names will continue to use the SSA name,
5771 		 and not be replaced with debug temps.  */
5772 	      delink_stmt_imm_use (stmt);
5773 
5774 	      gsi = nsi;
5775 	      gsi_next (&nsi);
5776 	      if (gsi_end_p (nsi))
5777 		break;
5778 	      stmt = gsi_stmt (nsi);
5779 	      if (!is_gimple_debug (stmt))
5780 		break;
5781 	    }
5782 
5783 	  set_curr_insn_location (sloc);
5784 	}
5785       else
5786 	{
5787 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5788 	  if (call_stmt
5789 	      && gimple_call_tail_p (call_stmt)
5790 	      && disable_tail_calls)
5791 	    gimple_call_set_tail (call_stmt, false);
5792 
5793 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5794 	    {
5795 	      bool can_fallthru;
5796 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5797 	      if (new_bb)
5798 		{
5799 		  if (can_fallthru)
5800 		    bb = new_bb;
5801 		  else
5802 		    return new_bb;
5803 		}
5804 	    }
5805 	  else
5806 	    {
5807 	      def_operand_p def_p;
5808 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5809 
5810 	      if (def_p != NULL)
5811 		{
5812 		  /* Ignore this stmt if it is in the list of
5813 		     replaceable expressions.  */
5814 		  if (SA.values
5815 		      && bitmap_bit_p (SA.values,
5816 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5817 		    continue;
5818 		}
5819 	      last = expand_gimple_stmt (stmt);
5820 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5821 	    }
5822 	}
5823     }
5824 
5825   currently_expanding_gimple_stmt = NULL;
5826 
5827   /* Expand implicit goto and convert goto_locus.  */
5828   FOR_EACH_EDGE (e, ei, bb->succs)
5829     {
5830       if (e->goto_locus != UNKNOWN_LOCATION)
5831 	set_curr_insn_location (e->goto_locus);
5832       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5833 	{
5834 	  emit_jump (label_rtx_for_bb (e->dest));
5835 	  e->flags &= ~EDGE_FALLTHRU;
5836 	}
5837     }
5838 
5839   /* Expanded RTL can create a jump in the last instruction of the block.
5840      This might later be assumed to be a jump to the successor and break edge
5841      insertion.  We need to insert a dummy move to prevent this.  PR41440. */
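  /* Schematically (the pseudo number is hypothetical): when the block now
     ends in a jump_insn yet falls through on its single edge, a no-op move
     such as
       (set (reg:SI 100) (reg:SI 100))
     is emitted after it, so the block no longer ends in something that edge
     insertion would treat as a jump to the successor.  */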
5842   if (single_succ_p (bb)
5843       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5844       && (last = get_last_insn ())
5845       && (JUMP_P (last)
5846 	  || (DEBUG_INSN_P (last)
5847 	      && JUMP_P (prev_nondebug_insn (last)))))
5848     {
5849       rtx dummy = gen_reg_rtx (SImode);
5850       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5851     }
5852 
5853   do_pending_stack_adjust ();
5854 
5855   /* Find the block tail.  The last insn in the block is the insn
5856      before a barrier and/or table jump insn.  */
5857   last = get_last_insn ();
5858   if (BARRIER_P (last))
5859     last = PREV_INSN (last);
5860   if (JUMP_TABLE_DATA_P (last))
5861     last = PREV_INSN (PREV_INSN (last));
5862   BB_END (bb) = last;
5863 
5864   update_bb_for_insn (bb);
5865 
5866   return bb;
5867 }
5868 
5869 
5870 /* Create a basic block for initialization code.  */
5871 
5872 static basic_block
5873 construct_init_block (void)
5874 {
5875   basic_block init_block, first_block;
5876   edge e = NULL;
5877   int flags;
5878 
5879   /* Multiple entry points not supported yet.  */
5880   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5881   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5882   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5883   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5884   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5885 
5886   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5887 
5888   /* When the entry edge points to the first basic block, we don't need a
5889      jump; otherwise we have to jump to the proper target.  */
5890   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5891     {
5892       tree label = gimple_block_label (e->dest);
5893 
5894       emit_jump (jump_target_rtx (label));
5895       flags = 0;
5896     }
5897   else
5898     flags = EDGE_FALLTHRU;
5899 
5900   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5901 				   get_last_insn (),
5902 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5903   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5904   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5905   if (e)
5906     {
5907       first_block = e->dest;
5908       redirect_edge_succ (e, init_block);
5909       e = make_single_succ_edge (init_block, first_block, flags);
5910     }
5911   else
5912     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5913 			       EDGE_FALLTHRU);
5914 
5915   update_bb_for_insn (init_block);
5916   return init_block;
5917 }
5918 
5919 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5920    found in the block tree.  */
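/* For example (a hypothetical block tree):
     outermost BLOCK                  -> BLOCK_NUMBER 0
       subblock A                     -> BLOCK_NUMBER 1
         subblock of A                -> BLOCK_NUMBER 2
       subblock B (via BLOCK_CHAIN)   -> BLOCK_NUMBER 1  */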
5921 
5922 static void
5923 set_block_levels (tree block, int level)
5924 {
5925   while (block)
5926     {
5927       BLOCK_NUMBER (block) = level;
5928       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5929       block = BLOCK_CHAIN (block);
5930     }
5931 }
5932 
5933 /* Create a block containing landing pads and similar stuff.  */
5934 
5935 static void
5936 construct_exit_block (void)
5937 {
5938   rtx_insn *head = get_last_insn ();
5939   rtx_insn *end;
5940   basic_block exit_block;
5941   edge e, e2;
5942   unsigned ix;
5943   edge_iterator ei;
5944   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5945   rtx_insn *orig_end = BB_END (prev_bb);
5946 
5947   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5948 
5949   /* Make sure the locus is set to the end of the function, so that
5950      epilogue line numbers and warnings are set properly.  */
5951   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5952     input_location = cfun->function_end_locus;
5953 
5954   /* Generate rtl for function exit.  */
5955   expand_function_end ();
5956 
5957   end = get_last_insn ();
5958   if (head == end)
5959     return;
5960   /* While emitting the function end we could move end of the last basic
5961      block.  */
5962   BB_END (prev_bb) = orig_end;
5963   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5964     head = NEXT_INSN (head);
5965   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5966      bb count accounting will be confused.  Any instructions before that
5967      label are emitted for the case where PREV_BB falls through into the
5968      exit block, so append those instructions to prev_bb in that case.  */
5969   if (NEXT_INSN (head) != return_label)
5970     {
5971       while (NEXT_INSN (head) != return_label)
5972 	{
5973 	  if (!NOTE_P (NEXT_INSN (head)))
5974 	    BB_END (prev_bb) = NEXT_INSN (head);
5975 	  head = NEXT_INSN (head);
5976 	}
5977     }
5978   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5979   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5980   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5981 
5982   ix = 0;
5983   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5984     {
5985       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5986       if (!(e->flags & EDGE_ABNORMAL))
5987 	redirect_edge_succ (e, exit_block);
5988       else
5989 	ix++;
5990     }
5991 
5992   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5993 			     EDGE_FALLTHRU);
5994   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5995     if (e2 != e)
5996       {
5997 	exit_block->count -= e2->count ();
5998       }
5999   update_bb_for_insn (exit_block);
6000 }
6001 
6002 /* Helper function for discover_nonconstant_array_refs.
6003    Look for ARRAY_REF nodes with non-constant indexes and mark them
6004    addressable.  */
6005 
6006 static tree
6007 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6008 				   void *data ATTRIBUTE_UNUSED)
6009 {
6010   tree t = *tp;
6011 
6012   if (IS_TYPE_OR_DECL_P (t))
6013     *walk_subtrees = 0;
6014   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6015     {
6016       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6017 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6018 	      && (!TREE_OPERAND (t, 2)
6019 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6020 	     || (TREE_CODE (t) == COMPONENT_REF
6021 		 && (!TREE_OPERAND (t,2)
6022 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6023 	     || TREE_CODE (t) == BIT_FIELD_REF
6024 	     || TREE_CODE (t) == REALPART_EXPR
6025 	     || TREE_CODE (t) == IMAGPART_EXPR
6026 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6027 	     || CONVERT_EXPR_P (t))
6028 	t = TREE_OPERAND (t, 0);
6029 
6030       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6031 	{
6032 	  t = get_base_address (t);
6033 	  if (t && DECL_P (t)
6034               && DECL_MODE (t) != BLKmode)
6035 	    TREE_ADDRESSABLE (t) = 1;
6036 	}
6037 
6038       *walk_subtrees = 0;
6039     }
6040 
6041   return NULL_TREE;
6042 }
6043 
6044 /* RTL expansion is not able to compile array references with variable
6045    offsets for arrays stored in a single register.  Discover such
6046    expressions and mark variables as addressable to avoid this
6047    scenario.  */
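/* A hedged example (hypothetical C source, not from any testcase):
     int v[2] = { x, y };
     ... v[i] ...               <-- i is not constant
   If v is small enough to be given a non-BLKmode and thus a register, the
   variable index cannot be expanded directly; marking v TREE_ADDRESSABLE
   forces it into memory, where the indexed access is straightforward.  */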
6048 
6049 static void
6050 discover_nonconstant_array_refs (void)
6051 {
6052   basic_block bb;
6053   gimple_stmt_iterator gsi;
6054 
6055   FOR_EACH_BB_FN (bb, cfun)
6056     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6057       {
6058 	gimple *stmt = gsi_stmt (gsi);
6059 	if (!is_gimple_debug (stmt))
6060 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6061       }
6062 }
6063 
6064 /* This function sets crtl->args.internal_arg_pointer to a virtual
6065    register if DRAP is needed.  The local register allocator will replace
6066    virtual_incoming_args_rtx with the virtual register.  */
6067 
6068 static void
6069 expand_stack_alignment (void)
6070 {
6071   rtx drap_rtx;
6072   unsigned int preferred_stack_boundary;
6073 
6074   if (! SUPPORTS_STACK_ALIGNMENT)
6075     return;
6076 
6077   if (cfun->calls_alloca
6078       || cfun->has_nonlocal_label
6079       || crtl->has_nonlocal_goto)
6080     crtl->need_drap = true;
6081 
6082   /* Call update_stack_boundary here again to update incoming stack
6083      boundary.  It may set incoming stack alignment to a different
6084      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6085      use the minimum incoming stack alignment to check if it is OK
6086      to perform sibcall optimization since sibcall optimization will
6087      only align the outgoing stack to incoming stack boundary.  */
6088   if (targetm.calls.update_stack_boundary)
6089     targetm.calls.update_stack_boundary ();
6090 
6091   /* The incoming stack frame has to be aligned at least at
6092      parm_stack_boundary.  */
6093   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6094 
6095   /* Update crtl->stack_alignment_estimated and use it later to align
6096      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6097      exceptions since callgraph doesn't collect incoming stack alignment
6098      in this case.  */
6099   if (cfun->can_throw_non_call_exceptions
6100       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6101     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6102   else
6103     preferred_stack_boundary = crtl->preferred_stack_boundary;
6104   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6105     crtl->stack_alignment_estimated = preferred_stack_boundary;
6106   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6107     crtl->stack_alignment_needed = preferred_stack_boundary;
6108 
6109   gcc_assert (crtl->stack_alignment_needed
6110 	      <= crtl->stack_alignment_estimated);
6111 
6112   crtl->stack_realign_needed
6113     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6114   crtl->stack_realign_tried = crtl->stack_realign_needed;
6115 
6116   crtl->stack_realign_processed = true;
6117 
6118   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6119      alignment.  */
6120   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6121   drap_rtx = targetm.calls.get_drap_rtx ();
6122 
6123   /* stack_realign_drap and drap_rtx must match.  */
6124   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6125 
6126   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6127   if (drap_rtx != NULL)
6128     {
6129       crtl->args.internal_arg_pointer = drap_rtx;
6130 
6131       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6132          needed. */
6133       fixup_tail_calls ();
6134     }
6135 }
6136 
6137 
6138 static void
6139 expand_main_function (void)
6140 {
6141 #if (defined(INVOKE__main)				\
6142      || (!defined(HAS_INIT_SECTION)			\
6143 	 && !defined(INIT_SECTION_ASM_OP)		\
6144 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6145   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6146 #endif
6147 }
6148 
6149 
6150 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6151    the beginning of a function to be protected.  */
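/* Schematically (the guard symbol is only an example; targets may instead
   use a TLS slot):
     x = the function's guard slot, from crtl->stack_protect_guard
     y = the guard value, e.g. __stack_chk_guard, from the target hook
   The prologue emitted here copies y into x, preferably through the
   target's stack_protect_set pattern so the guard value does not linger
   in a scratch register.  */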
6152 
6153 static void
6154 stack_protect_prologue (void)
6155 {
6156   tree guard_decl = targetm.stack_protect_guard ();
6157   rtx x, y;
6158 
6159   x = expand_normal (crtl->stack_protect_guard);
6160   if (guard_decl)
6161     y = expand_normal (guard_decl);
6162   else
6163     y = const0_rtx;
6164 
6165   /* Allow the target to copy from Y to X without leaking Y into a
6166      register.  */
6167   if (targetm.have_stack_protect_set ())
6168     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6169       {
6170 	emit_insn (insn);
6171 	return;
6172       }
6173 
6174   /* Otherwise do a straight move.  */
6175   emit_move_insn (x, y);
6176 }
6177 
6178 /* Translate the intermediate representation contained in the CFG
6179    from GIMPLE trees to RTL.
6180 
6181    We do conversion per basic block and preserve/update the tree CFG.
6182    This implies we have to do some magic as the CFG can simultaneously
6183    consist of basic blocks containing RTL and GIMPLE trees.  This can
6184    confuse the CFG hooks, so be careful to not manipulate CFG during
6185    the expansion.  */
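/* As a rough illustration (pseudo numbers and SImode are hypothetical),
   a GIMPLE statement such as
     c_3 = a_1 + b_2;
   becomes an insn along the lines of
     (set (reg:SI 62 [ c ]) (plus:SI (reg:SI 60 [ a ]) (reg:SI 61 [ b ])))
   with each block's GIMPLE sequence replaced in place by such insn chains,
   while the block and edge structure of the CFG is kept.  */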
6186 
6187 namespace {
6188 
6189 const pass_data pass_data_expand =
6190 {
6191   RTL_PASS, /* type */
6192   "expand", /* name */
6193   OPTGROUP_NONE, /* optinfo_flags */
6194   TV_EXPAND, /* tv_id */
6195   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6196     | PROP_gimple_lcx
6197     | PROP_gimple_lvec
6198     | PROP_gimple_lva), /* properties_required */
6199   PROP_rtl, /* properties_provided */
6200   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6201   0, /* todo_flags_start */
6202   0, /* todo_flags_finish */
6203 };
6204 
6205 class pass_expand : public rtl_opt_pass
6206 {
6207 public:
6208   pass_expand (gcc::context *ctxt)
6209     : rtl_opt_pass (pass_data_expand, ctxt)
6210   {}
6211 
6212   /* opt_pass methods: */
6213   virtual unsigned int execute (function *);
6214 
6215 }; // class pass_expand
6216 
6217 unsigned int
6218 pass_expand::execute (function *fun)
6219 {
6220   basic_block bb, init_block;
6221   edge_iterator ei;
6222   edge e;
6223   rtx_insn *var_seq, *var_ret_seq;
6224   unsigned i;
6225 
6226   timevar_push (TV_OUT_OF_SSA);
6227   rewrite_out_of_ssa (&SA);
6228   timevar_pop (TV_OUT_OF_SSA);
6229   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6230 
6231   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6232     {
6233       gimple_stmt_iterator gsi;
6234       FOR_EACH_BB_FN (bb, cfun)
6235 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6236 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6237 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6238     }
6239 
6240   /* Make sure all values used by the optimization passes have sane
6241      defaults.  */
6242   reg_renumber = 0;
6243 
6244   /* Some backends want to know that we are expanding to RTL.  */
6245   currently_expanding_to_rtl = 1;
6246   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6247   free_dominance_info (CDI_DOMINATORS);
6248 
6249   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6250 
6251   if (chkp_function_instrumented_p (current_function_decl))
6252     chkp_reset_rtl_bounds ();
6253 
6254   insn_locations_init ();
6255   if (!DECL_IS_BUILTIN (current_function_decl))
6256     {
6257       /* Eventually, all FEs should explicitly set function_start_locus.  */
6258       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6259 	set_curr_insn_location
6260 	  (DECL_SOURCE_LOCATION (current_function_decl));
6261       else
6262 	set_curr_insn_location (fun->function_start_locus);
6263     }
6264   else
6265     set_curr_insn_location (UNKNOWN_LOCATION);
6266   prologue_location = curr_insn_location ();
6267 
6268 #ifdef INSN_SCHEDULING
6269   init_sched_attrs ();
6270 #endif
6271 
6272   /* Make sure the first insn is a note even if we don't want linenums.
6273      This makes sure the first insn will never be deleted.
6274      Also, final expects a note to appear there.  */
6275   emit_note (NOTE_INSN_DELETED);
6276 
6277   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6278   discover_nonconstant_array_refs ();
6279 
6280   targetm.expand_to_rtl_hook ();
6281   crtl->init_stack_alignment ();
6282   fun->cfg->max_jumptable_ents = 0;
6283 
6284   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6285      of the function section at expansion time to predict the distance of calls.  */
6286   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6287 
6288   /* Expand the variables recorded during gimple lowering.  */
6289   timevar_push (TV_VAR_EXPAND);
6290   start_sequence ();
6291 
6292   var_ret_seq = expand_used_vars ();
6293 
6294   var_seq = get_insns ();
6295   end_sequence ();
6296   timevar_pop (TV_VAR_EXPAND);
6297 
6298   /* Honor stack protection warnings.  */
6299   if (warn_stack_protect)
6300     {
6301       if (fun->calls_alloca)
6302 	warning (OPT_Wstack_protector,
6303 		 "stack protector not protecting local variables: "
6304 		 "variable length buffer");
6305       if (has_short_buffer && !crtl->stack_protect_guard)
6306 	warning (OPT_Wstack_protector,
6307 		 "stack protector not protecting function: "
6308 		 "all local arrays are less than %d bytes long",
6309 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6310     }
6311 
6312   /* Set up parameters and prepare for return, for the function.  */
6313   expand_function_start (current_function_decl);
6314 
6315   /* If we emitted any instructions for setting up the variables,
6316      emit them before the FUNCTION_START note.  */
6317   if (var_seq)
6318     {
6319       emit_insn_before (var_seq, parm_birth_insn);
6320 
6321       /* In expand_function_end we'll insert the alloca save/restore
6322 	 before parm_birth_insn.  We've just inserted an alloca call.
6323 	 Adjust the pointer to match.  */
6324       parm_birth_insn = var_seq;
6325     }
6326 
6327   /* Now propagate the RTL assignment of each partition to the
6328      underlying var of each SSA_NAME.  */
6329   tree name;
6330 
6331   FOR_EACH_SSA_NAME (i, name, cfun)
6332     {
6333       /* We might have generated new SSA names in
6334 	 update_alias_info_with_stack_vars.  They will have a NULL
6335 	 defining statement, and won't be part of the partitioning,
6336 	 so ignore those.  */
6337       if (!SSA_NAME_DEF_STMT (name))
6338 	continue;
6339 
6340       adjust_one_expanded_partition_var (name);
6341     }
6342 
6343   /* Clean up RTL of variables that straddle across multiple
6344      partitions, and check that the rtl of any PARM_DECLs that are not
6345      cleaned up is that of their default defs.  */
6346   FOR_EACH_SSA_NAME (i, name, cfun)
6347     {
6348       int part;
6349 
6350       /* We might have generated new SSA names in
6351 	 update_alias_info_with_stack_vars.  They will have a NULL
6352 	 defining statement, and won't be part of the partitioning,
6353 	 so ignore those.  */
6354       if (!SSA_NAME_DEF_STMT (name))
6355 	continue;
6356       part = var_to_partition (SA.map, name);
6357       if (part == NO_PARTITION)
6358 	continue;
6359 
6360       /* If this decl was marked as living in multiple places, reset
6361 	 this now to NULL.  */
6362       tree var = SSA_NAME_VAR (name);
6363       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6364 	SET_DECL_RTL (var, NULL);
6365       /* Check that the pseudos chosen by assign_parms are those of
6366 	 the corresponding default defs.  */
6367       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6368 	       && (TREE_CODE (var) == PARM_DECL
6369 		   || TREE_CODE (var) == RESULT_DECL))
6370 	{
6371 	  rtx in = DECL_RTL_IF_SET (var);
6372 	  gcc_assert (in);
6373 	  rtx out = SA.partition_to_pseudo[part];
6374 	  gcc_assert (in == out);
6375 
6376 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6377 	     those expected by debug backends for each parm and for
6378 	     the result.  This is particularly important for stabs,
6379 	     whose register elimination from parm's DECL_RTL may cause
6380 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6381 	     attrs.  So, make sure the RTL already has the parm as the
6382 	     EXPR, so that it won't change.  */
6383 	  SET_DECL_RTL (var, NULL_RTX);
6384 	  if (MEM_P (in))
6385 	    set_mem_attributes (in, var, true);
6386 	  SET_DECL_RTL (var, in);
6387 	}
6388     }
6389 
6390   /* If this function is `main', emit a call to `__main'
6391      to run global initializers, etc.  */
6392   if (DECL_NAME (current_function_decl)
6393       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6394       && DECL_FILE_SCOPE_P (current_function_decl))
6395     expand_main_function ();
6396 
6397   /* Initialize the stack_protect_guard field.  This must happen after the
6398      call to __main (if any) so that the external decl is initialized.  */
6399   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6400     stack_protect_prologue ();
6401 
6402   expand_phi_nodes (&SA);
6403 
6404   /* Release any stale SSA redirection data.  */
6405   redirect_edge_var_map_empty ();
6406 
6407   /* Register rtl specific functions for cfg.  */
6408   rtl_register_cfg_hooks ();
6409 
6410   init_block = construct_init_block ();
6411 
6412   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6413      remaining edges later.  */
6414   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6415     e->flags &= ~EDGE_EXECUTABLE;
6416 
6417   /* If the function has too many markers, drop them while expanding.  */
6418   if (cfun->debug_marker_count
6419       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6420     cfun->debug_nonbind_markers = false;
6421 
6422   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6423   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6424 		  next_bb)
6425     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6426 
6427   if (MAY_HAVE_DEBUG_BIND_INSNS)
6428     expand_debug_locations ();
6429 
6430   if (deep_ter_debug_map)
6431     {
6432       delete deep_ter_debug_map;
6433       deep_ter_debug_map = NULL;
6434     }
6435 
6436   /* Free stuff we no longer need after GIMPLE optimizations.  */
6437   free_dominance_info (CDI_DOMINATORS);
6438   free_dominance_info (CDI_POST_DOMINATORS);
6439   delete_tree_cfg_annotations (fun);
6440 
6441   timevar_push (TV_OUT_OF_SSA);
6442   finish_out_of_ssa (&SA);
6443   timevar_pop (TV_OUT_OF_SSA);
6444 
6445   timevar_push (TV_POST_EXPAND);
6446   /* We are no longer in SSA form.  */
6447   fun->gimple_df->in_ssa_p = false;
6448   loops_state_clear (LOOP_CLOSED_SSA);
6449 
6450   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6451      conservatively to true until they are all profile aware.  */
6452   delete lab_rtx_for_bb;
6453   free_histograms (fun);
6454 
6455   construct_exit_block ();
6456   insn_locations_finalize ();
6457 
6458   if (var_ret_seq)
6459     {
6460       rtx_insn *after = return_label;
6461       rtx_insn *next = NEXT_INSN (after);
6462       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6463 	after = next;
6464       emit_insn_after (var_ret_seq, after);
6465     }
6466 
6467   /* Zap the tree EH table.  */
6468   set_eh_throw_stmt_table (fun, NULL);
6469 
6470   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6471      split edges which edge insertions might do.  */
6472   rebuild_jump_labels (get_insns ());
6473 
6474   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6475 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6476     {
6477       edge e;
6478       edge_iterator ei;
6479       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6480 	{
6481 	  if (e->insns.r)
6482 	    {
6483 	      rebuild_jump_labels_chain (e->insns.r);
6484 	      /* Put insns after parm birth, but before
6485 		 NOTE_INSNS_FUNCTION_BEG.  */
6486 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6487 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6488 		{
6489 		  rtx_insn *insns = e->insns.r;
6490 		  e->insns.r = NULL;
6491 		  if (NOTE_P (parm_birth_insn)
6492 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6493 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6494 		  else
6495 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6496 		}
6497 	      else
6498 		commit_one_edge_insertion (e);
6499 	    }
6500 	  else
6501 	    ei_next (&ei);
6502 	}
6503     }
6504 
6505   /* We're done expanding trees to RTL.  */
6506   currently_expanding_to_rtl = 0;
6507 
6508   flush_mark_addressable_queue ();
6509 
6510   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6511 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6512     {
6513       edge e;
6514       edge_iterator ei;
6515       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6516 	{
6517 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6518 	  e->flags &= ~EDGE_EXECUTABLE;
6519 
6520 	  /* At the moment not all abnormal edges match the RTL
6521 	     representation.  It is safe to remove them here as
6522 	     find_many_sub_basic_blocks will rediscover them.
6523 	     In the future we should get this fixed properly.  */
6524 	  if ((e->flags & EDGE_ABNORMAL)
6525 	      && !(e->flags & EDGE_SIBCALL))
6526 	    remove_edge (e);
6527 	  else
6528 	    ei_next (&ei);
6529 	}
6530     }
6531 
6532   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6533   bitmap_ones (blocks);
6534   find_many_sub_basic_blocks (blocks);
6535   purge_all_dead_edges ();
6536 
6537   expand_stack_alignment ();
6538 
6539   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6540      function.  */
6541   if (crtl->tail_call_emit)
6542     fixup_tail_calls ();
6543 
6544   /* After initial rtl generation, call back to finish generating
6545      exception support code.  We need to do this before cleaning up
6546      the CFG as the code does not expect dead landing pads.  */
6547   if (fun->eh->region_tree != NULL)
6548     finish_eh_generation ();
6549 
6550   /* BB subdivision may have created basic blocks that are only reachable
6551      from unlikely bbs but not marked as such in the profile.  */
6552   if (optimize)
6553     propagate_unlikely_bbs_forward ();
6554 
6555   /* Remove unreachable blocks, otherwise we cannot compute dominators
6556      which are needed for loop state verification.  As a side-effect
6557      this also compacts blocks.
6558      ???  We cannot remove trivially dead insns here as for example
6559      the DRAP reg on i?86 is not magically live at this point.
6560      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6561   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6562 
6563   checking_verify_flow_info ();
6564 
6565   /* Initialize pseudos allocated for hard registers.  */
6566   emit_initial_value_sets ();
6567 
6568   /* And finally unshare all RTL.  */
6569   unshare_all_rtl ();
6570 
6571   /* There's no need to defer outputting this function any more; we
6572      know we want to output it.  */
6573   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6574 
6575   /* Now that we're done expanding trees to RTL, we shouldn't have any
6576      more CONCATs anywhere.  */
6577   generating_concat_p = 0;
6578 
6579   if (dump_file)
6580     {
6581       fprintf (dump_file,
6582 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6583       /* And the pass manager will dump RTL for us.  */
6584     }
6585 
6586   /* If we're emitting a nested function, make sure its parent gets
6587      emitted as well.  Doing otherwise confuses debug info.  */
6588     {
6589       tree parent;
6590       for (parent = DECL_CONTEXT (current_function_decl);
6591 	   parent != NULL_TREE;
6592 	   parent = get_containing_scope (parent))
6593 	if (TREE_CODE (parent) == FUNCTION_DECL)
6594 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6595     }
6596 
6597   TREE_ASM_WRITTEN (current_function_decl) = 1;
6598 
6599   /* After expanding, the return labels are no longer needed. */
6600   return_label = NULL;
6601   naked_return_label = NULL;
6602 
6603   /* After expanding, the tm_restart map is no longer needed.  */
6604   if (fun->gimple_df->tm_restart)
6605     fun->gimple_df->tm_restart = NULL;
6606 
6607   /* Tag the blocks with a depth number so that change_scope can find
6608      the common parent easily.  */
6609   set_block_levels (DECL_INITIAL (fun->decl), 0);
6610   default_rtl_profile ();
6611 
6612   /* For -dx discard loops now, otherwise IL verify in clean_state will
6613      ICE.  */
6614   if (rtl_dump_and_exit)
6615     {
6616       cfun->curr_properties &= ~PROP_loops;
6617       loop_optimizer_finalize ();
6618     }
6619 
6620   timevar_pop (TV_POST_EXPAND);
6621 
6622   return 0;
6623 }
6624 
6625 } // anon namespace
6626 
6627 rtl_opt_pass *
6628 make_pass_expand (gcc::context *ctxt)
6629 {
6630   return new pass_expand (ctxt);
6631 }
6632