1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79 
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82    give the same symbol without quotes for an alternative entry point.  You
83    must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* This variable holds information helping the rewriting of SSA trees
89    into RTL.  */
90 struct ssaexpand SA;
91 
92 /* This variable holds the currently expanded gimple statement for purposes
93    of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
95 
96 static rtx expand_debug_expr (tree);
97 
98 static bool defer_stack_allocation (tree, bool);
99 
100 static void record_alignment_for_reg_var (unsigned int);
101 
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103    statement STMT.  */
104 
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108   tree t;
109   enum gimple_rhs_class grhs_class;
110 
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112 
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115 		TREE_TYPE (gimple_assign_lhs (stmt)),
116 		gimple_assign_rhs1 (stmt),
117 		gimple_assign_rhs2 (stmt),
118 		gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121 		TREE_TYPE (gimple_assign_lhs (stmt)),
122 		gimple_assign_rhs1 (stmt),
123 		gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126 		TREE_TYPE (gimple_assign_lhs (stmt)),
127 		gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
129     {
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 	   && gimple_location (stmt) != EXPR_LOCATION (t))
134 	  || (gimple_block (stmt)
135 	      && currently_expanding_to_rtl
136 	      && EXPR_P (t)))
137 	t = copy_node (t);
138     }
139   else
140     gcc_unreachable ();
141 
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
144 
145   return t;
146 }
147 
148 
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152 
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
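
/* Note (added for clarity, not in the original sources): SSAVAR maps an
   SSA_NAME to its underlying VAR_DECL/PARM_DECL/RESULT_DECL -- which may be
   NULL_TREE for anonymous SSA names -- and is the identity for plain decls.  */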
154 
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156    Prefer ignored decls, to simplify debug dumps and to reduce the
157    ambiguity that arises from the same user variable being in multiple
158    partitions (this is less likely for compiler-introduced temps).  */
159 
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163   if (cur == NULL || cur == next)
164     return next;
165 
166   if (DECL_P (cur) && DECL_IGNORED_P (cur))
167     return cur;
168 
169   if (DECL_P (next) && DECL_IGNORED_P (next))
170     return next;
171 
172   return cur;
173 }
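
/* For illustration (not from the original sources): leader_merge of a
   user-visible VAR_DECL and a DECL_IGNORED_P compiler temporary returns the
   ignored temporary, whichever of the two arguments it arrives in.  */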
174 
175 /* Associate declaration T with storage space X.  If T is not an
176    SSA name this is exactly SET_DECL_RTL; otherwise associate the
177    partition of T with X.  */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181   gcc_checking_assert (!x
182 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 		       || (use_register_for_decl (t)
184 			   ? (REG_P (x)
185 			      || (GET_CODE (x) == CONCAT
186 				  && (REG_P (XEXP (x, 0))
187 				      || SUBREG_P (XEXP (x, 0)))
188 				  && (REG_P (XEXP (x, 1))
189 				      || SUBREG_P (XEXP (x, 1))))
190 			      /* We need to accept PARALLELs for RESULT_DECLs
191 				 because of vector types with BLKmode returned
192 				 in multiple registers, but they are supposed
193 				 to be uncoalesced.  */
194 			      || (GET_CODE (x) == PARALLEL
195 				  && SSAVAR (t)
196 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 				  && (GET_MODE (x) == BLKmode
198 				      || !flag_tree_coalesce_vars)))
199 			   : (MEM_P (x) || x == pc_rtx
200 			      || (GET_CODE (x) == CONCAT
201 				  && MEM_P (XEXP (x, 0))
202 				  && MEM_P (XEXP (x, 1))))));
203   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204      RESULT_DECLs has the expected mode.  For memory, we accept
205      unpromoted modes, since that's what we're likely to get.  For
206      PARM_DECLs and RESULT_DECLs, we'll have been called by
207      set_parm_rtl, which will give us the default def, so we don't
208      have to compute it ourselves.  For RESULT_DECLs, we accept mode
209      mismatches too, as long as we have BLKmode or are not coalescing
210      across variables, so that we don't reject BLKmode PARALLELs or
211      unpromoted REGs.  */
212   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 		       || (SSAVAR (t)
214 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 			   && (promote_ssa_mode (t, NULL) == BLKmode
216 			       || !flag_tree_coalesce_vars))
217 		       || !use_register_for_decl (t)
218 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
219 
220   if (x)
221     {
222       bool skip = false;
223       tree cur = NULL_TREE;
224       rtx xm = x;
225 
226     retry:
227       if (MEM_P (xm))
228 	cur = MEM_EXPR (xm);
229       else if (REG_P (xm))
230 	cur = REG_EXPR (xm);
231       else if (SUBREG_P (xm))
232 	{
233 	  gcc_assert (subreg_lowpart_p (xm));
234 	  xm = SUBREG_REG (xm);
235 	  goto retry;
236 	}
237       else if (GET_CODE (xm) == CONCAT)
238 	{
239 	  xm = XEXP (xm, 0);
240 	  goto retry;
241 	}
242       else if (GET_CODE (xm) == PARALLEL)
243 	{
244 	  xm = XVECEXP (xm, 0, 0);
245 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 	  xm = XEXP (xm, 0);
247 	  goto retry;
248 	}
249       else if (xm == pc_rtx)
250 	skip = true;
251       else
252 	gcc_unreachable ();
253 
254       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255 
256       if (cur != next)
257 	{
258 	  if (MEM_P (x))
259 	    set_mem_attributes (x,
260 				next && TREE_CODE (next) == SSA_NAME
261 				? TREE_TYPE (next)
262 				: next, true);
263 	  else
264 	    set_reg_attrs_for_decl_rtl (next, x);
265 	}
266     }
267 
268   if (TREE_CODE (t) == SSA_NAME)
269     {
270       int part = var_to_partition (SA.map, t);
271       if (part != NO_PARTITION)
272 	{
273 	  if (SA.partition_to_pseudo[part])
274 	    gcc_assert (SA.partition_to_pseudo[part] == x);
275 	  else if (x != pc_rtx)
276 	    SA.partition_to_pseudo[part] = x;
277 	}
278       /* For the benefit of debug information at -O0 (where
279          vartracking doesn't run) record the place also in the base
280          DECL.  For PARMs and RESULTs, do so only when setting the
281          default def.  */
282       if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 	  && (VAR_P (SSA_NAME_VAR (t))
284 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
285 	{
286 	  tree var = SSA_NAME_VAR (t);
287 	  /* If we don't yet have something recorded, just record it now.  */
288 	  if (!DECL_RTL_SET_P (var))
289 	    SET_DECL_RTL (var, x);
290 	  /* If we have it set already to "multiple places" don't
291 	     change this.  */
292 	  else if (DECL_RTL (var) == pc_rtx)
293 	    ;
294 	  /* If we have something recorded and it's not the same place
295 	     as we want to record now, we have multiple partitions for the
296 	     same base variable, with different places.  We can't just
297 	     randomly choose one, hence we have to say that we don't know.
298 	     This only happens with optimization, and in that case
299 	     var-tracking will figure out the right thing.  */
300 	  else if (DECL_RTL (var) != x)
301 	    SET_DECL_RTL (var, pc_rtx);
302 	}
303     }
304   else
305     SET_DECL_RTL (t, x);
306 }
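
/* Note (added for clarity): pc_rtx does double duty around here.
   add_stack_var stores it via set_rtl as a sentinel meaning "deferred to the
   stack-slot partitioning machinery", and set_rtl above records it in a base
   DECL whose partitions ended up in different places, so later consumers
   know the location is ambiguous.  */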
307 
308 /* This structure holds data relevant to one variable that will be
309    placed in a stack slot.  */
310 struct stack_var
311 {
312   /* The Variable.  */
313   tree decl;
314 
315   /* Initially, the size of the variable.  Later, the size of the partition,
316      if this variable becomes its partition's representative.  */
317   poly_uint64 size;
318 
319   /* The *byte* alignment required for this variable.  Or, as with the
320      size, the alignment for this partition.  */
321   unsigned int alignb;
322 
323   /* The partition representative.  */
324   size_t representative;
325 
326   /* The next stack variable in the partition, or EOC.  */
327   size_t next;
328 
329   /* The indices of conflicting stack variables.  */
330   bitmap conflicts;
331 };
332 
333 #define EOC  ((size_t)-1)
334 
335 /* We have an array of such objects while deciding allocation.  */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
340 
341 /* Conflict bitmaps go on this obstack.  This allows us to destroy
342    all of them in one big sweep.  */
343 static bitmap_obstack stack_var_bitmap_obstack;
344 
345 /* An array of indices sorted by stack_var_cmp: "large" alignment entries
346    first, then decreasing size, then decreasing alignment.  */
347 static size_t *stack_vars_sorted;
348 
349 /* The phase of the stack frame.  This is the known misalignment of
350    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
351    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
352 static int frame_phase;
353 
354 /* Used during expand_used_vars to remember if we saw any decls for
355    which we'd like to enable stack smashing protection.  */
356 static bool has_protected_decls;
357 
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
360 static bool has_short_buffer;
361 
362 /* Compute the byte alignment to use for DECL.  Ignore any alignment
363    we cannot honor given the expected alignment of the stack boundary.  */
364 
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368   unsigned int align;
369 
370   if (TREE_CODE (decl) == SSA_NAME)
371     align = TYPE_ALIGN (TREE_TYPE (decl));
372   else
373     {
374       align = LOCAL_DECL_ALIGNMENT (decl);
375       SET_DECL_ALIGN (decl, align);
376     }
377   return align / BITS_PER_UNIT;
378 }
379 
380 /* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
381    down otherwise.  Return the adjusted BASE value.  */
382 
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
386   return align_up ? (base + align - 1) & -align : base & -align;
387 }
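
/* Illustrative only: with ALIGN = 16, align_base (37, 16, true) yields
   48 = (37 + 15) & -16, while align_base (37, 16, false) yields 32 = 37 & -16.  */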
388 
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390    Return the frame offset.  */
391 
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
394 {
395   poly_int64 offset, new_frame_offset;
396 
397   if (FRAME_GROWS_DOWNWARD)
398     {
399       new_frame_offset
400 	= aligned_lower_bound (frame_offset - frame_phase - size,
401 			       align) + frame_phase;
402       offset = new_frame_offset;
403     }
404   else
405     {
406       new_frame_offset
407 	= aligned_upper_bound (frame_offset - frame_phase,
408 			       align) + frame_phase;
409       offset = new_frame_offset;
410       new_frame_offset += size;
411     }
412   frame_offset = new_frame_offset;
413 
414   if (frame_offset_overflow (frame_offset, cfun->decl))
415     frame_offset = offset = 0;
416 
417   return offset;
418 }
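
/* A worked example with made-up numbers: on a FRAME_GROWS_DOWNWARD target
   with frame_phase 0 and frame_offset -8, a request for 16 bytes at 16-byte
   alignment sets frame_offset to -32 and returns -32; on an upward-growing
   frame the same request returns the aligned old offset 0 and bumps
   frame_offset to 16.  */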
419 
420 /* Accumulate DECL into STACK_VARS.  */
421 
422 static void
423 add_stack_var (tree decl)
424 {
425   struct stack_var *v;
426 
427   if (stack_vars_num >= stack_vars_alloc)
428     {
429       if (stack_vars_alloc)
430 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
431       else
432 	stack_vars_alloc = 32;
433       stack_vars
434 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
435     }
436   if (!decl_to_stack_part)
437     decl_to_stack_part = new hash_map<tree, size_t>;
438 
439   v = &stack_vars[stack_vars_num];
440   decl_to_stack_part->put (decl, stack_vars_num);
441 
442   v->decl = decl;
443   tree size = TREE_CODE (decl) == SSA_NAME
444     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445     : DECL_SIZE_UNIT (decl);
446   v->size = tree_to_poly_uint64 (size);
447   /* Ensure that all variables have size, so that &a != &b for any two
448      variables that are simultaneously live.  */
449   if (known_eq (v->size, 0U))
450     v->size = 1;
451   v->alignb = align_local_variable (decl);
452   /* An alignment of zero can mightily confuse us later.  */
453   gcc_assert (v->alignb != 0);
454 
455   /* All variables are initially in their own partition.  */
456   v->representative = stack_vars_num;
457   v->next = EOC;
458 
459   /* All variables initially conflict with no other.  */
460   v->conflicts = NULL;
461 
462   /* Ensure that this decl doesn't get put onto the list twice.  */
463   set_rtl (decl, pc_rtx);
464 
465   stack_vars_num++;
466 }
467 
468 /* Make the decls associated with luid's X and Y conflict.  */
469 
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
472 {
473   struct stack_var *a = &stack_vars[x];
474   struct stack_var *b = &stack_vars[y];
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with luid's X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
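
/* Note (added for clarity): add_stack_var_conflict records conflicts
   symmetrically, so testing A's bitmap alone above is sufficient.  */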
502 
503 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
504    enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
523    record conflicts between it and all currently active other partitions
524    from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
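
/* Note (added for clarity): visit_conflict relies on bitmap_set_bit
   returning true only when the bit was previously clear, so the conflicts
   for a decl are recorded just once, at its first mention in the block.  */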
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This over-approximates in the case where we, for instance, moved an
632      address-taken operation upward without also moving a dereference to
633      it upward.  But it's conservatively correct, as a variable can never
634      hold values before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
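
  /* Roughly (comment added for clarity): the live set at the start of a
     block is the union of the live sets recorded at the ends of its
     predecessors; the first mention of a partitioned decl makes it live and
     its gimple clobber kills it.  We iterate over the blocks in reverse
     post-order until no per-block live set changes, then do one final sweep
     that records the conflicts.  */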
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID for sort stability, decreasing.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
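
/* Illustrative only: three small-alignment entries of {size 32, align 8},
   {size 16, align 16} and {size 16, align 8} sort in exactly that order:
   size decreasing first, then alignment decreasing as the tie-breaker.  */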
723 
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PI points to variables that are in a partition
728    together with other variables add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared, it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with a single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855    /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
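
/* Note (added for clarity): this routine only forms the partitions and
   accumulates their alignment requirements; the frame offsets of the
   representatives are assigned later, in expand_stack_vars.  */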
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects; J joins I's partition.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Set the alignment we actually gave this decl if it isn't an SSA name.
999          If it is, we generate stack slots only accidentally, so it isn't as
1000 	 important; we'll simply use the alignment that is already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always end of some padding followed
1022      by start of the padding that needs Address Sanitizer protection.
1023      The vector is in reverse order: highest offset pairs come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process them when
1052      no predicate is involved.  */
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      prev_offset = align_base (prev_offset,
1130 					MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 					!FRAME_GROWS_DOWNWARD);
1132 	      tree repr_decl = NULL_TREE;
1133 	      offset
1134 		= alloc_stack_frame_space (stack_vars[i].size
1135 					   + ASAN_RED_ZONE_SIZE,
1136 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 
1138 	      data->asan_vec.safe_push (prev_offset);
1139 	      /* Allocating a constant amount of space from a constant
1140 		 starting offset must give a constant result.  */
1141 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 					.to_constant ());
1143 	      /* Find the best representative of the partition.
1144 		 Prefer decls with a DECL_NAME, and better still those
1145 		 satisfying the asan_protect_stack_decl predicate.  */
1146 	      for (j = i; j != EOC; j = stack_vars[j].next)
1147 		if (asan_protect_stack_decl (stack_vars[j].decl)
1148 		    && DECL_NAME (stack_vars[j].decl))
1149 		  {
1150 		    repr_decl = stack_vars[j].decl;
1151 		    break;
1152 		  }
1153 		else if (repr_decl == NULL_TREE
1154 			 && DECL_P (stack_vars[j].decl)
1155 			 && DECL_NAME (stack_vars[j].decl))
1156 		  repr_decl = stack_vars[j].decl;
1157 	      if (repr_decl == NULL_TREE)
1158 		repr_decl = stack_vars[i].decl;
1159 	      data->asan_decl_vec.safe_push (repr_decl);
1160 
1161 	      /* Make sure a representative is unpoisoned if another
1162 		 variable in the partition is handled by
1163 		 use-after-scope sanitization.  */
1164 	      if (asan_handled_variables != NULL
1165 		  && !asan_handled_variables->contains (repr_decl))
1166 		{
1167 		  for (j = i; j != EOC; j = stack_vars[j].next)
1168 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1169 		      break;
1170 		  if (j != EOC)
1171 		    asan_handled_variables->add (repr_decl);
1172 		}
1173 
1174 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1175 	      if (data->asan_base == NULL)
1176 		data->asan_base = gen_reg_rtx (Pmode);
1177 	      base = data->asan_base;
1178 
1179 	      if (!STRICT_ALIGNMENT)
1180 		base_align = crtl->max_used_stack_slot_alignment;
1181 	      else
1182 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1183 				  GET_MODE_ALIGNMENT (SImode)
1184 				  << ASAN_SHADOW_SHIFT);
1185 	    }
1186 	  else
1187 	    {
1188 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1189 	      base_align = crtl->max_used_stack_slot_alignment;
1190 	    }
1191 	}
1192       else
1193 	{
1194 	  /* Large alignment is only processed in the last pass.  */
1195 	  if (pred)
1196 	    continue;
1197 
1198 	  /* If there were any variables requiring "large" alignment, allocate
1199 	     space.  */
1200 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1201 	    {
1202 	      poly_int64 loffset;
1203 	      rtx large_allocsize;
1204 
1205 	      large_allocsize = gen_int_mode (large_size, Pmode);
1206 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1207 	      loffset = alloc_stack_frame_space
1208 		(rtx_to_poly_int64 (large_allocsize),
1209 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1210 	      large_base = get_dynamic_stack_base (loffset, large_align);
1211 	      large_allocation_done = true;
1212 	    }
1213 	  gcc_assert (large_base != NULL);
1214 
1215 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1216 	  offset = large_alloc;
1217 	  large_alloc += stack_vars[i].size;
1218 
1219 	  base = large_base;
1220 	  base_align = large_align;
1221 	}
1222 
1223       /* Create rtl for each variable based on their location within the
1224 	 partition.  */
1225       for (j = i; j != EOC; j = stack_vars[j].next)
1226 	{
1227 	  expand_one_stack_var_at (stack_vars[j].decl,
1228 				   base, base_align,
1229 				   offset);
1230 	}
1231     }
1232 
1233   gcc_assert (known_eq (large_alloc, large_size));
1234 }
1235 
1236 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1237 static poly_uint64
1238 account_stack_vars (void)
1239 {
1240   size_t si, j, i, n = stack_vars_num;
1241   poly_uint64 size = 0;
1242 
1243   for (si = 0; si < n; ++si)
1244     {
1245       i = stack_vars_sorted[si];
1246 
1247       /* Skip variables that aren't partition representatives, for now.  */
1248       if (stack_vars[i].representative != i)
1249 	continue;
1250 
1251       size += stack_vars[i].size;
1252       for (j = i; j != EOC; j = stack_vars[j].next)
1253 	set_rtl (stack_vars[j].decl, NULL);
1254     }
1255   return size;
1256 }
1257 
1258 /* Record the RTL assignment X for the default def of PARM.  */
1259 
1260 extern void
1261 set_parm_rtl (tree parm, rtx x)
1262 {
1263   gcc_assert (TREE_CODE (parm) == PARM_DECL
1264 	      || TREE_CODE (parm) == RESULT_DECL);
1265 
1266   if (x && !MEM_P (x))
1267     {
1268       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1269 					      TYPE_MODE (TREE_TYPE (parm)),
1270 					      TYPE_ALIGN (TREE_TYPE (parm)));
1271 
1272       /* If the variable alignment is very large we'll dynamically
1273 	 allocate it, which means that the in-frame portion is just a
1274 	 pointer.  ??? We've got a pseudo for sure here, do we
1275 	 actually dynamically allocate its spilling area if needed?
1276 	 ??? Isn't it a problem when Pmode alignment also exceeds
1277 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1278       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279 	align = GET_MODE_ALIGNMENT (Pmode);
1280 
1281       record_alignment_for_reg_var (align);
1282     }
1283 
1284   tree ssa = ssa_default_def (cfun, parm);
1285   if (!ssa)
1286     return set_rtl (parm, x);
1287 
1288   int part = var_to_partition (SA.map, ssa);
1289   gcc_assert (part != NO_PARTITION);
1290 
1291   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1292   gcc_assert (changed);
1293 
1294   set_rtl (ssa, x);
1295   gcc_assert (DECL_RTL (parm) == x);
1296 }
1297 
1298 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1299    to a variable to be allocated in the stack frame.  */
1300 
1301 static void
1302 expand_one_stack_var_1 (tree var)
1303 {
1304   poly_uint64 size;
1305   poly_int64 offset;
1306   unsigned byte_align;
1307 
1308   if (TREE_CODE (var) == SSA_NAME)
1309     {
1310       tree type = TREE_TYPE (var);
1311       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1312       byte_align = TYPE_ALIGN_UNIT (type);
1313     }
1314   else
1315     {
1316       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1317       byte_align = align_local_variable (var);
1318     }
1319 
1320   /* We handle highly aligned variables in expand_stack_vars.  */
1321   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1322 
1323   offset = alloc_stack_frame_space (size, byte_align);
1324 
1325   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1326 			   crtl->max_used_stack_slot_alignment, offset);
1327 }
1328 
1329 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1330    already assigned some MEM.  */
1331 
1332 static void
1333 expand_one_stack_var (tree var)
1334 {
1335   if (TREE_CODE (var) == SSA_NAME)
1336     {
1337       int part = var_to_partition (SA.map, var);
1338       if (part != NO_PARTITION)
1339 	{
1340 	  rtx x = SA.partition_to_pseudo[part];
1341 	  gcc_assert (x);
1342 	  gcc_assert (MEM_P (x));
1343 	  return;
1344 	}
1345     }
1346 
1347   return expand_one_stack_var_1 (var);
1348 }
1349 
1350 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1351    that will reside in a hard register.  */
1352 
1353 static void
1354 expand_one_hard_reg_var (tree var)
1355 {
1356   rest_of_decl_compilation (var, 0, 0);
1357 }
1358 
1359 /* Record the alignment requirements of some variable assigned to a
1360    pseudo.  */
1361 
1362 static void
1363 record_alignment_for_reg_var (unsigned int align)
1364 {
1365   if (SUPPORTS_STACK_ALIGNMENT
1366       && crtl->stack_alignment_estimated < align)
1367     {
1368       /* stack_alignment_estimated shouldn't change after the stack
1369          realign decision has been made.  */
1370       gcc_assert (!crtl->stack_realign_processed);
1371       crtl->stack_alignment_estimated = align;
1372     }
1373 
1374   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1375      So here we only make sure stack_alignment_needed >= align.  */
1376   if (crtl->stack_alignment_needed < align)
1377     crtl->stack_alignment_needed = align;
1378   if (crtl->max_used_stack_slot_alignment < align)
1379     crtl->max_used_stack_slot_alignment = align;
1380 }
1381 
1382 /* Create RTL for an SSA partition.  */
1383 
1384 static void
1385 expand_one_ssa_partition (tree var)
1386 {
1387   int part = var_to_partition (SA.map, var);
1388   gcc_assert (part != NO_PARTITION);
1389 
1390   if (SA.partition_to_pseudo[part])
1391     return;
1392 
1393   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1394 					  TYPE_MODE (TREE_TYPE (var)),
1395 					  TYPE_ALIGN (TREE_TYPE (var)));
1396 
1397   /* If the variable alignment is very large we'll dynamically allocate
1398      it, which means that the in-frame portion is just a pointer.  */
1399   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1400     align = GET_MODE_ALIGNMENT (Pmode);
1401 
1402   record_alignment_for_reg_var (align);
1403 
1404   if (!use_register_for_decl (var))
1405     {
1406       if (defer_stack_allocation (var, true))
1407 	add_stack_var (var);
1408       else
1409 	expand_one_stack_var_1 (var);
1410       return;
1411     }
1412 
1413   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1414   rtx x = gen_reg_rtx (reg_mode);
1415 
1416   set_rtl (var, x);
1417 
1418   /* For a promoted variable, X will not be used directly but wrapped in a
1419      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1420      will assume that its upper bits can be inferred from its lower bits.
1421      Therefore, if X isn't initialized on every path from the entry, then
1422      we must do it manually in order to fulfill the above assumption.  */
1423   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1424       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1425     emit_move_insn (x, CONST0_RTX (reg_mode));
1426 }
1427 
1428 /* Record the association between the RTL generated for partition PART
1429    and the underlying variable of the SSA_NAME VAR.  */
1430 
1431 static void
1432 adjust_one_expanded_partition_var (tree var)
1433 {
1434   if (!var)
1435     return;
1436 
1437   tree decl = SSA_NAME_VAR (var);
1438 
1439   int part = var_to_partition (SA.map, var);
1440   if (part == NO_PARTITION)
1441     return;
1442 
1443   rtx x = SA.partition_to_pseudo[part];
1444 
1445   gcc_assert (x);
1446 
1447   set_rtl (var, x);
1448 
1449   if (!REG_P (x))
1450     return;
1451 
1452   /* Note if the object is a user variable.  */
1453   if (decl && !DECL_ARTIFICIAL (decl))
1454     mark_user_reg (x);
1455 
1456   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1457     mark_reg_pointer (x, get_pointer_alignment (var));
1458 }
1459 
1460 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1461    that will reside in a pseudo register.  */
1462 
1463 static void
1464 expand_one_register_var (tree var)
1465 {
1466   if (TREE_CODE (var) == SSA_NAME)
1467     {
1468       int part = var_to_partition (SA.map, var);
1469       if (part != NO_PARTITION)
1470 	{
1471 	  rtx x = SA.partition_to_pseudo[part];
1472 	  gcc_assert (x);
1473 	  gcc_assert (REG_P (x));
1474 	  return;
1475 	}
1476       gcc_unreachable ();
1477     }
1478 
1479   tree decl = var;
1480   tree type = TREE_TYPE (decl);
1481   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1482   rtx x = gen_reg_rtx (reg_mode);
1483 
1484   set_rtl (var, x);
1485 
1486   /* Note if the object is a user variable.  */
1487   if (!DECL_ARTIFICIAL (decl))
1488     mark_user_reg (x);
1489 
1490   if (POINTER_TYPE_P (type))
1491     mark_reg_pointer (x, get_pointer_alignment (var));
1492 }
1493 
1494 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1495    has some associated error, e.g. its type is error-mark.  We just need
1496    to pick something that won't crash the rest of the compiler.  */
1497 
1498 static void
1499 expand_one_error_var (tree var)
1500 {
1501   machine_mode mode = DECL_MODE (var);
1502   rtx x;
1503 
1504   if (mode == BLKmode)
1505     x = gen_rtx_MEM (BLKmode, const0_rtx);
1506   else if (mode == VOIDmode)
1507     x = const0_rtx;
1508   else
1509     x = gen_reg_rtx (mode);
1510 
1511   SET_DECL_RTL (var, x);
1512 }
1513 
1514 /* A subroutine of expand_one_var.  VAR is a variable that will be
1515    allocated to the local stack frame.  Return true if we wish to
1516    add VAR to STACK_VARS so that it will be coalesced with other
1517    variables.  Return false to allocate VAR immediately.
1518 
1519    This function is used to reduce the number of variables considered
1520    for coalescing, which reduces the size of the quadratic problem.  */
1521 
1522 static bool
1523 defer_stack_allocation (tree var, bool toplevel)
1524 {
1525   tree size_unit = TREE_CODE (var) == SSA_NAME
1526     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1527     : DECL_SIZE_UNIT (var);
1528   poly_uint64 size;
1529 
1530   /* Whether the variable is small enough for immediate allocation not to be
1531      a problem with regard to the frame size.  */
1532   bool smallish
1533     = (poly_int_tree_p (size_unit, &size)
1534        && (estimated_poly_value (size)
1535 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1536 
1537   /* If stack protection is enabled, *all* stack variables must be deferred,
1538      so that we can re-order the strings to the top of the frame.
1539      Similarly for Address Sanitizer.  */
1540   if (flag_stack_protect || asan_sanitize_stack_p ())
1541     return true;
1542 
1543   unsigned int align = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_ALIGN (TREE_TYPE (var))
1545     : DECL_ALIGN (var);
1546 
1547   /* We handle "large" alignment via dynamic allocation.  We want to handle
1548      this extra complication in only one place, so defer them.  */
1549   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1550     return true;
1551 
1552   bool ignored = TREE_CODE (var) == SSA_NAME
1553     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1554     : DECL_IGNORED_P (var);
1555 
1556   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1557      might be detached from their block and appear at toplevel when we reach
1558      here.  We want to coalesce them with variables from other blocks when
1559      the immediate contribution to the frame size would be noticeable.  */
1560   if (toplevel && optimize > 0 && ignored && !smallish)
1561     return true;
1562 
1563   /* Variables declared in the outermost scope automatically conflict
1564      with every other variable.  The only reason to want to defer them
1565      at all is that, after sorting, we can more efficiently pack
1566      small variables in the stack frame.  Continue to defer at -O2.  */
1567   if (toplevel && optimize < 2)
1568     return false;
1569 
1570   /* Without optimization, *most* variables are allocated from the
1571      stack, which makes the quadratic problem large exactly when we
1572      want compilation to proceed as quickly as possible.  On the
1573      other hand, we don't want the function's stack frame size to
1574      get completely out of hand.  So we avoid adding scalars and
1575      "small" aggregates to the list at all.  */
1576   if (optimize == 0 && smallish)
1577     return false;
1578 
1579   return true;
1580 }
1581 
1582 /* A subroutine of expand_used_vars.  Expand one variable according to
1583    its flavor.  Variables to be placed on the stack are not actually
1584    expanded yet, merely recorded.
1585    When REALLY_EXPAND is false, only add stack values to be allocated.
1586    Return the stack usage this variable is supposed to take.  */
1588 
1589 static poly_uint64
1590 expand_one_var (tree var, bool toplevel, bool really_expand)
1591 {
1592   unsigned int align = BITS_PER_UNIT;
1593   tree origvar = var;
1594 
1595   var = SSAVAR (var);
1596 
1597   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1598     {
1599       if (is_global_var (var))
1600 	return 0;
1601 
1602       /* Because we don't know if VAR will be in register or on stack,
1603 	 we conservatively assume it will be on stack even if VAR is
1604 	 eventually put into register after RA pass.  For non-automatic
1605 	 variables, which won't be on stack, we collect alignment of
1606 	 type and ignore user specified alignment.  Similarly for
1607 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1608       if (TREE_STATIC (var)
1609 	  || DECL_EXTERNAL (var)
1610 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1611 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1612 				   TYPE_MODE (TREE_TYPE (var)),
1613 				   TYPE_ALIGN (TREE_TYPE (var)));
1614       else if (DECL_HAS_VALUE_EXPR_P (var)
1615 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1616 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1617 	   or variables which were assigned a stack slot already by
1618 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has already
1619 	   been adjusted to match the offset chosen for it.  */
1620 	align = crtl->stack_alignment_estimated;
1621       else
1622 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1623 
1624       /* If the variable alignment is very large we'll dynamically allocate
1625 	 it, which means that the in-frame portion is just a pointer.  */
1626       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1627 	align = GET_MODE_ALIGNMENT (Pmode);
1628     }
1629 
1630   record_alignment_for_reg_var (align);
1631 
1632   poly_uint64 size;
1633   if (TREE_CODE (origvar) == SSA_NAME)
1634     {
1635       gcc_assert (!VAR_P (var)
1636 		  || (!DECL_EXTERNAL (var)
1637 		      && !DECL_HAS_VALUE_EXPR_P (var)
1638 		      && !TREE_STATIC (var)
1639 		      && TREE_TYPE (var) != error_mark_node
1640 		      && !DECL_HARD_REGISTER (var)
1641 		      && really_expand));
1642     }
1643   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1644     ;
1645   else if (DECL_EXTERNAL (var))
1646     ;
1647   else if (DECL_HAS_VALUE_EXPR_P (var))
1648     ;
1649   else if (TREE_STATIC (var))
1650     ;
1651   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1652     ;
1653   else if (TREE_TYPE (var) == error_mark_node)
1654     {
1655       if (really_expand)
1656         expand_one_error_var (var);
1657     }
1658   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1659     {
1660       if (really_expand)
1661 	{
1662 	  expand_one_hard_reg_var (var);
1663 	  if (!DECL_HARD_REGISTER (var))
1664 	    /* Invalid register specification.  */
1665 	    expand_one_error_var (var);
1666 	}
1667     }
1668   else if (use_register_for_decl (var))
1669     {
1670       if (really_expand)
1671         expand_one_register_var (origvar);
1672     }
1673   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1674 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1675     {
1676       /* Reject variables which cover more than half of the address-space.  */
1677       if (really_expand)
1678 	{
1679 	  error ("size of variable %q+D is too large", var);
1680 	  expand_one_error_var (var);
1681 	}
1682     }
1683   else if (defer_stack_allocation (var, toplevel))
1684     add_stack_var (origvar);
1685   else
1686     {
1687       if (really_expand)
1688         {
1689           if (lookup_attribute ("naked",
1690                                 DECL_ATTRIBUTES (current_function_decl)))
1691             error ("cannot allocate stack for variable %q+D, naked function.",
1692                    var);
1693 
1694           expand_one_stack_var (origvar);
1695         }
1696       return size;
1697     }
1698   return 0;
1699 }
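
/* A sketch of how a few (hypothetical) locals flow through the dispatch
   above:

     int i;                           never memory-referenced
                                      -> expand_one_register_var
     char buf[64];  ... &buf ...      address escapes
                                      -> add_stack_var / expand_one_stack_var
     register int r asm ("regname");  -> expand_one_hard_reg_var  */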
1700 
1701 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1702    expanding variables.  Those variables that can be put into registers
1703    are allocated pseudos; those that can't are put on the stack.
1704 
1705    TOPLEVEL is true if this is the outermost BLOCK.  */
1706 
1707 static void
1708 expand_used_vars_for_block (tree block, bool toplevel)
1709 {
1710   tree t;
1711 
1712   /* Expand all variables at this level.  */
1713   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1714     if (TREE_USED (t)
1715         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1716 	    || !DECL_NONSHAREABLE (t)))
1717       expand_one_var (t, toplevel, true);
1718 
1719   /* Expand all variables at contained (nested) levels.  */
1720   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1721     expand_used_vars_for_block (t, false);
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    and clear TREE_USED on all local variables.  */
1726 
1727 static void
1728 clear_tree_used (tree block)
1729 {
1730   tree t;
1731 
1732   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1734     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 	|| !DECL_NONSHAREABLE (t))
1736       TREE_USED (t) = 0;
1737 
1738   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1739     clear_tree_used (t);
1740 }
1741 
1742 enum {
1743   SPCT_FLAG_DEFAULT = 1,
1744   SPCT_FLAG_ALL = 2,
1745   SPCT_FLAG_STRONG = 3,
1746   SPCT_FLAG_EXPLICIT = 4
1747 };
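
/* Note (not from the original sources): these values mirror the
   -fstack-protector family of options, i.e. -fstack-protector,
   -fstack-protector-all, -fstack-protector-strong and
   -fstack-protector-explicit respectively, as recorded in flag_stack_protect.  */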
1748 
1749 /* Examine TYPE and determine a bit mask of the following features.  */
1750 
1751 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1752 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1753 #define SPCT_HAS_ARRAY			4
1754 #define SPCT_HAS_AGGREGATE		8
1755 
1756 static unsigned int
1757 stack_protect_classify_type (tree type)
1758 {
1759   unsigned int ret = 0;
1760   tree t;
1761 
1762   switch (TREE_CODE (type))
1763     {
1764     case ARRAY_TYPE:
1765       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1766       if (t == char_type_node
1767 	  || t == signed_char_type_node
1768 	  || t == unsigned_char_type_node)
1769 	{
1770 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1771 	  unsigned HOST_WIDE_INT len;
1772 
1773 	  if (!TYPE_SIZE_UNIT (type)
1774 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1775 	    len = max;
1776 	  else
1777 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1778 
1779 	  if (len < max)
1780 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1781 	  else
1782 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1783 	}
1784       else
1785 	ret = SPCT_HAS_ARRAY;
1786       break;
1787 
1788     case UNION_TYPE:
1789     case QUAL_UNION_TYPE:
1790     case RECORD_TYPE:
1791       ret = SPCT_HAS_AGGREGATE;
1792       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1793 	if (TREE_CODE (t) == FIELD_DECL)
1794 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1795       break;
1796 
1797     default:
1798       break;
1799     }
1800 
1801   return ret;
1802 }
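
/* A rough illustration, assuming the default --param ssp-buffer-size of 8:

     char small_buf[4];         -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big_buf[64];          -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int  nums[16];             -> SPCT_HAS_ARRAY
     struct { char b[64]; } s;  -> SPCT_HAS_AGGREGATE plus the bits found
                                   while walking the FIELD_DECLs  */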
1803 
1804 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1805    part of the local stack frame.  Remember if we ever return nonzero for
1806    any variable in this function.  The return value is the phase number in
1807    which the variable should be allocated.  */
1808 
1809 static int
1810 stack_protect_decl_phase (tree decl)
1811 {
1812   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1813   int ret = 0;
1814 
1815   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1816     has_short_buffer = true;
1817 
1818   if (flag_stack_protect == SPCT_FLAG_ALL
1819       || flag_stack_protect == SPCT_FLAG_STRONG
1820       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1821 	  && lookup_attribute ("stack_protect",
1822 			       DECL_ATTRIBUTES (current_function_decl))))
1823     {
1824       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1825 	  && !(bits & SPCT_HAS_AGGREGATE))
1826 	ret = 1;
1827       else if (bits & SPCT_HAS_ARRAY)
1828 	ret = 2;
1829     }
1830   else
1831     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1832 
1833   if (ret)
1834     has_protected_decls = true;
1835 
1836   return ret;
1837 }
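
/* For example (a sketch, not exhaustive): under -fstack-protector-strong a
   "char buf[64]" lands in phase 1, an "int nums[16]" in phase 2, and a plain
   scalar in phase 0 (not segregated).  Under plain -fstack-protector only
   declarations whose type contains a "large" char array get phase 1.  */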
1838 
1839 /* Two helper routines that check for phase 1 and phase 2.  These are used
1840    as callbacks for expand_stack_vars.  */
1841 
1842 static bool
1843 stack_protect_decl_phase_1 (size_t i)
1844 {
1845   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1846 }
1847 
1848 static bool
1849 stack_protect_decl_phase_2 (size_t i)
1850 {
1851   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1852 }
1853 
1854 /* A helper function that checks for the asan phase (phase 3 when the
1855    stack protector is also used).  Used as a callback for expand_stack_vars.
1856    Returns true if any of the vars in the partition need to be protected.  */
1857 
1858 static bool
1859 asan_decl_phase_3 (size_t i)
1860 {
1861   while (i != EOC)
1862     {
1863       if (asan_protect_stack_decl (stack_vars[i].decl))
1864 	return true;
1865       i = stack_vars[i].next;
1866     }
1867   return false;
1868 }
1869 
1870 /* Ensure that variables in different stack protection phases conflict
1871    so that they are not merged and share the same stack slot.  */
1872 
1873 static void
1874 add_stack_protection_conflicts (void)
1875 {
1876   size_t i, j, n = stack_vars_num;
1877   unsigned char *phase;
1878 
1879   phase = XNEWVEC (unsigned char, n);
1880   for (i = 0; i < n; ++i)
1881     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1882 
1883   for (i = 0; i < n; ++i)
1884     {
1885       unsigned char ph_i = phase[i];
1886       for (j = i + 1; j < n; ++j)
1887 	if (ph_i != phase[j])
1888 	  add_stack_var_conflict (i, j);
1889     }
1890 
1891   XDELETEVEC (phase);
1892 }
1893 
1894 /* Create a decl for the guard at the top of the stack frame.  */
1895 
1896 static void
1897 create_stack_guard (void)
1898 {
1899   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1900 			   VAR_DECL, NULL, ptr_type_node);
1901   TREE_THIS_VOLATILE (guard) = 1;
1902   TREE_USED (guard) = 1;
1903   expand_one_stack_var (guard);
1904   crtl->stack_protect_guard = guard;
1905 }
1906 
1907 /* Prepare for expanding variables.  */
1908 static void
1909 init_vars_expansion (void)
1910 {
1911   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1912   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1913 
1914   /* A map from decl to stack partition.  */
1915   decl_to_stack_part = new hash_map<tree, size_t>;
1916 
1917   /* Initialize local stack smashing state.  */
1918   has_protected_decls = false;
1919   has_short_buffer = false;
1920 }
1921 
1922 /* Free up stack variable graph data.  */
1923 static void
1924 fini_vars_expansion (void)
1925 {
1926   bitmap_obstack_release (&stack_var_bitmap_obstack);
1927   if (stack_vars)
1928     XDELETEVEC (stack_vars);
1929   if (stack_vars_sorted)
1930     XDELETEVEC (stack_vars_sorted);
1931   stack_vars = NULL;
1932   stack_vars_sorted = NULL;
1933   stack_vars_alloc = stack_vars_num = 0;
1934   delete decl_to_stack_part;
1935   decl_to_stack_part = NULL;
1936 }
1937 
1938 /* Make a fair guess for the size of the stack frame of the function
1939    in NODE.  This doesn't have to be exact; the result is only used in
1940    the inline heuristics.  So we don't want to run the full stack var
1941    packing algorithm (which is quadratic in the number of stack vars).
1942    Instead, we calculate the total size of all stack vars.  This turns
1943    out to be a pretty fair estimate -- packing of stack vars doesn't
1944    happen very often.  */
1945 
1946 HOST_WIDE_INT
1947 estimated_stack_frame_size (struct cgraph_node *node)
1948 {
1949   poly_int64 size = 0;
1950   size_t i;
1951   tree var;
1952   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1953 
1954   push_cfun (fn);
1955 
1956   init_vars_expansion ();
1957 
1958   FOR_EACH_LOCAL_DECL (fn, i, var)
1959     if (auto_var_in_fn_p (var, fn->decl))
1960       size += expand_one_var (var, true, false);
1961 
1962   if (stack_vars_num > 0)
1963     {
1964       /* Fake sorting the stack vars for account_stack_vars ().  */
1965       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1966       for (i = 0; i < stack_vars_num; ++i)
1967 	stack_vars_sorted[i] = i;
1968       size += account_stack_vars ();
1969     }
1970 
1971   fini_vars_expansion ();
1972   pop_cfun ();
1973   return estimated_poly_value (size);
1974 }
1975 
1976 /* Helper routine to check if a record or union contains an array field. */
1977 
1978 static int
1979 record_or_union_type_has_array_p (const_tree tree_type)
1980 {
1981   tree fields = TYPE_FIELDS (tree_type);
1982   tree f;
1983 
1984   for (f = fields; f; f = DECL_CHAIN (f))
1985     if (TREE_CODE (f) == FIELD_DECL)
1986       {
1987 	tree field_type = TREE_TYPE (f);
1988 	if (RECORD_OR_UNION_TYPE_P (field_type)
1989 	    && record_or_union_type_has_array_p (field_type))
1990 	  return 1;
1991 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1992 	  return 1;
1993       }
1994   return 0;
1995 }
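
/* For instance (illustrative only):

     struct no_arr  { int i; double d; };      -> 0
     struct has_arr { int i; char buf[16]; };  -> 1
     struct nested  { struct has_arr h; };     -> 1 (found recursively)  */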
1996 
1997 /* Check if the current function has local referenced variables that
1998    have their addresses taken, contain an array, or are arrays.  */
1999 
2000 static bool
2001 stack_protect_decl_p ()
2002 {
2003   unsigned i;
2004   tree var;
2005 
2006   FOR_EACH_LOCAL_DECL (cfun, i, var)
2007     if (!is_global_var (var))
2008       {
2009 	tree var_type = TREE_TYPE (var);
2010 	if (VAR_P (var)
2011 	    && (TREE_CODE (var_type) == ARRAY_TYPE
2012 		|| TREE_ADDRESSABLE (var)
2013 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2014 		    && record_or_union_type_has_array_p (var_type))))
2015 	  return true;
2016       }
2017   return false;
2018 }
2019 
2020 /* Check if the current function has calls that use a return slot.  */
2021 
2022 static bool
2023 stack_protect_return_slot_p ()
2024 {
2025   basic_block bb;
2026 
2027   FOR_ALL_BB_FN (bb, cfun)
2028     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2029 	 !gsi_end_p (gsi); gsi_next (&gsi))
2030       {
2031 	gimple *stmt = gsi_stmt (gsi);
2032 	/* This assumes that calls to internal-only functions never
2033 	   use a return slot.  */
2034 	if (is_gimple_call (stmt)
2035 	    && !gimple_call_internal_p (stmt)
2036 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2037 				  gimple_call_fndecl (stmt)))
2038 	  return true;
2039       }
2040   return false;
2041 }
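
/* A made-up example of a call that makes this return true, assuming the ABI
   returns "struct big" in memory rather than in registers:

     struct big { int a[32]; };
     struct big get_big (void);
     ...
     b = get_big ();

   The aggregate return value needs a return slot in the caller's frame.  */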
2042 
2043 /* Expand all variables used in the function.  */
2044 
2045 static rtx_insn *
2046 expand_used_vars (void)
2047 {
2048   tree var, outer_block = DECL_INITIAL (current_function_decl);
2049   auto_vec<tree> maybe_local_decls;
2050   rtx_insn *var_end_seq = NULL;
2051   unsigned i;
2052   unsigned len;
2053   bool gen_stack_protect_signal = false;
2054 
2055   /* Compute the phase of the stack frame for this function.  */
2056   {
2057     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2058     int off = targetm.starting_frame_offset () % align;
2059     frame_phase = off ? align - off : 0;
2060   }
2061 
2062   /* Set TREE_USED on all variables in the local_decls.  */
2063   FOR_EACH_LOCAL_DECL (cfun, i, var)
2064     TREE_USED (var) = 1;
2065   /* Clear TREE_USED on all variables associated with a block scope.  */
2066   clear_tree_used (DECL_INITIAL (current_function_decl));
2067 
2068   init_vars_expansion ();
2069 
2070   if (targetm.use_pseudo_pic_reg ())
2071     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2072 
2073   for (i = 0; i < SA.map->num_partitions; i++)
2074     {
2075       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2076 	continue;
2077 
2078       tree var = partition_to_var (SA.map, i);
2079 
2080       gcc_assert (!virtual_operand_p (var));
2081 
2082       expand_one_ssa_partition (var);
2083     }
2084 
2085   if (flag_stack_protect == SPCT_FLAG_STRONG)
2086       gen_stack_protect_signal
2087 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2088 
2089   /* At this point all variables on the local_decls with TREE_USED
2090      set are not associated with any block scope.  Lay them out.  */
2091 
2092   len = vec_safe_length (cfun->local_decls);
2093   FOR_EACH_LOCAL_DECL (cfun, i, var)
2094     {
2095       bool expand_now = false;
2096 
2097       /* Expanded above already.  */
2098       if (is_gimple_reg (var))
2099 	{
2100 	  TREE_USED (var) = 0;
2101 	  goto next;
2102 	}
2103       /* We didn't set a block for static or extern because it's hard
2104 	 to tell the difference between a global variable (re)declared
2105 	 in a local scope, and one that's really declared there to
2106 	 begin with.  And it doesn't really matter much, since we're
2107 	 not giving them stack space.  Expand them now.  */
2108       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2109 	expand_now = true;
2110 
2111       /* Expand variables not associated with any block now.  Those created by
2112 	 the optimizers could be live anywhere in the function.  Those that
2113 	 could possibly have been scoped originally and detached from their
2114 	 block will have their allocation deferred so we coalesce them with
2115 	 others when optimization is enabled.  */
2116       else if (TREE_USED (var))
2117 	expand_now = true;
2118 
2119       /* Finally, mark all variables on the list as used.  We'll use
2120 	 this in a moment when we expand those associated with scopes.  */
2121       TREE_USED (var) = 1;
2122 
2123       if (expand_now)
2124 	expand_one_var (var, true, true);
2125 
2126     next:
2127       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2128 	{
2129 	  rtx rtl = DECL_RTL_IF_SET (var);
2130 
2131 	  /* Keep artificial non-ignored vars in cfun->local_decls
2132 	     chain until instantiate_decls.  */
2133 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2134 	    add_local_decl (cfun, var);
2135 	  else if (rtl == NULL_RTX)
2136 	    /* If rtl isn't set yet, which can happen e.g. with
2137 	       -fstack-protector, retry before returning from this
2138 	       function.  */
2139 	    maybe_local_decls.safe_push (var);
2140 	}
2141     }
2142 
2143   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2144 
2145      +-----------------+-----------------+
2146      | ...processed... | ...duplicates...|
2147      +-----------------+-----------------+
2148                        ^
2149 		       +-- LEN points here.
2150 
2151      We just want the duplicates, as those are the artificial
2152      non-ignored vars that we want to keep until instantiate_decls.
2153      Move them down and truncate the array.  */
2154   if (!vec_safe_is_empty (cfun->local_decls))
2155     cfun->local_decls->block_remove (0, len);
2156 
2157   /* At this point, all variables within the block tree with TREE_USED
2158      set are actually used by the optimized function.  Lay them out.  */
2159   expand_used_vars_for_block (outer_block, true);
2160 
2161   if (stack_vars_num > 0)
2162     {
2163       add_scope_conflicts ();
2164 
2165       /* If stack protection is enabled, we don't share space between
2166 	 vulnerable data and non-vulnerable data.  */
2167       if (flag_stack_protect != 0
2168 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2169 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2170 		  && lookup_attribute ("stack_protect",
2171 				       DECL_ATTRIBUTES (current_function_decl)))))
2172 	add_stack_protection_conflicts ();
2173 
2174       /* Now that we have collected all stack variables, and have computed a
2175 	 minimal interference graph, attempt to save some stack space.  */
2176       partition_stack_vars ();
2177       if (dump_file)
2178 	dump_stack_var_partition ();
2179     }
2180 
2181   switch (flag_stack_protect)
2182     {
2183     case SPCT_FLAG_ALL:
2184       create_stack_guard ();
2185       break;
2186 
2187     case SPCT_FLAG_STRONG:
2188       if (gen_stack_protect_signal
2189 	  || cfun->calls_alloca || has_protected_decls
2190 	  || lookup_attribute ("stack_protect",
2191 			       DECL_ATTRIBUTES (current_function_decl)))
2192 	create_stack_guard ();
2193       break;
2194 
2195     case SPCT_FLAG_DEFAULT:
2196       if (cfun->calls_alloca || has_protected_decls
2197 	  || lookup_attribute ("stack_protect",
2198 			       DECL_ATTRIBUTES (current_function_decl)))
2199 	create_stack_guard ();
2200       break;
2201 
2202     case SPCT_FLAG_EXPLICIT:
2203       if (lookup_attribute ("stack_protect",
2204 			    DECL_ATTRIBUTES (current_function_decl)))
2205 	create_stack_guard ();
2206       break;
2207     default:
2208       ;
2209     }
2210 
2211   /* Assign rtl to each variable based on these partitions.  */
2212   if (stack_vars_num > 0)
2213     {
2214       struct stack_vars_data data;
2215 
2216       data.asan_base = NULL_RTX;
2217       data.asan_alignb = 0;
2218 
2219       /* Reorder decls to be protected by iterating over the variables
2220 	 array multiple times, and allocating out of each phase in turn.  */
2221       /* ??? We could probably integrate this into the qsort we did
2222 	 earlier, such that we naturally see these variables first,
2223 	 and thus naturally allocate things in the right order.  */
2224       if (has_protected_decls)
2225 	{
2226 	  /* Phase 1 contains only character arrays.  */
2227 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2228 
2229 	  /* Phase 2 contains other kinds of arrays.  */
2230 	  if (flag_stack_protect == SPCT_FLAG_ALL
2231 	      || flag_stack_protect == SPCT_FLAG_STRONG
2232 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2233 		  && lookup_attribute ("stack_protect",
2234 				       DECL_ATTRIBUTES (current_function_decl))))
2235 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2236 	}
2237 
2238       if (asan_sanitize_stack_p ())
2239 	/* Phase 3, any partitions that need asan protection
2240 	   in addition to phase 1 and 2.  */
2241 	expand_stack_vars (asan_decl_phase_3, &data);
2242 
2243       /* ASAN description strings don't yet have a syntax for expressing
2244 	 polynomial offsets.  */
2245       HOST_WIDE_INT prev_offset;
2246       if (!data.asan_vec.is_empty ()
2247 	  && frame_offset.is_constant (&prev_offset))
2248 	{
2249 	  HOST_WIDE_INT offset, sz, redzonesz;
2250 	  redzonesz = ASAN_RED_ZONE_SIZE;
2251 	  sz = data.asan_vec[0] - prev_offset;
2252 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2253 	      && data.asan_alignb <= 4096
2254 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2255 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2256 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
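	  /* Worked example (assuming ASAN_RED_ZONE_SIZE is 32): with
	     asan_alignb == 64 and sz == 40 we get
	     redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
	     i.e. the red zone is grown so that sz + redzonesz stays a
	     multiple of the 64-byte alignment requirement.  */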
2257 	  /* Allocating a constant amount of space from a constant
2258 	     starting offset must give a constant result.  */
2259 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2260 		    .to_constant ());
2261 	  data.asan_vec.safe_push (prev_offset);
2262 	  data.asan_vec.safe_push (offset);
2263 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2264 	  if (STRICT_ALIGNMENT)
2265 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2266 				      << ASAN_SHADOW_SHIFT)
2267 				     / BITS_PER_UNIT, 1);
2268 
2269 	  var_end_seq
2270 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2271 					  data.asan_base,
2272 					  data.asan_alignb,
2273 					  data.asan_vec.address (),
2274 					  data.asan_decl_vec.address (),
2275 					  data.asan_vec.length ());
2276 	}
2277 
2278       expand_stack_vars (NULL, &data);
2279     }
2280 
2281   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2282     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2283 					      virtual_stack_vars_rtx,
2284 					      var_end_seq);
2285 
2286   fini_vars_expansion ();
2287 
2288   /* If there were any artificial non-ignored vars without rtl
2289      found earlier, see if deferred stack allocation hasn't assigned
2290      rtl to them.  */
2291   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2292     {
2293       rtx rtl = DECL_RTL_IF_SET (var);
2294 
2295       /* Keep artificial non-ignored vars in cfun->local_decls
2296 	 chain until instantiate_decls.  */
2297       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2298 	add_local_decl (cfun, var);
2299     }
2300 
2301   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2302   if (STACK_ALIGNMENT_NEEDED)
2303     {
2304       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2305       if (FRAME_GROWS_DOWNWARD)
2306 	frame_offset = aligned_lower_bound (frame_offset, align);
2307       else
2308 	frame_offset = aligned_upper_bound (frame_offset, align);
2309     }
2310 
2311   return var_end_seq;
2312 }
2313 
2314 
2315 /* If we need to produce a detailed dump, print the tree representation
2316    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2317    generated for STMT should have been appended.  */
2318 
2319 static void
2320 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2321 {
2322   if (dump_file && (dump_flags & TDF_DETAILS))
2323     {
2324       fprintf (dump_file, "\n;; ");
2325       print_gimple_stmt (dump_file, stmt, 0,
2326 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2327       fprintf (dump_file, "\n");
2328 
2329       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2330     }
2331 }
2332 
2333 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2334 
2335 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2336 
2337 /* Returns the label_rtx expression for a label starting basic block BB.  */
2338 
2339 static rtx_code_label *
2340 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2341 {
2342   gimple_stmt_iterator gsi;
2343   tree lab;
2344 
2345   if (bb->flags & BB_RTL)
2346     return block_label (bb);
2347 
2348   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2349   if (elt)
2350     return *elt;
2351 
2352   /* Find the tree label if it is present.  */
2353 
2354   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2355     {
2356       glabel *lab_stmt;
2357 
2358       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2359       if (!lab_stmt)
2360 	break;
2361 
2362       lab = gimple_label_label (lab_stmt);
2363       if (DECL_NONLOCAL (lab))
2364 	break;
2365 
2366       return jump_target_rtx (lab);
2367     }
2368 
2369   rtx_code_label *l = gen_label_rtx ();
2370   lab_rtx_for_bb->put (bb, l);
2371   return l;
2372 }
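
/* Informally: a block that starts with an ordinary GIMPLE label reuses that
   label's rtx via jump_target_rtx, while a label-less block gets a fresh
   code_label that is remembered in lab_rtx_for_bb, so repeated queries for
   the same block return the same rtx.  */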
2373 
2374 
2375 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2376    of a basic block where we just expanded the conditional at the end,
2377    possibly clean up the CFG and instruction sequence.  LAST is the
2378    last instruction before the just emitted jump sequence.  */
2379 
2380 static void
2381 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2382 {
2383   /* Special case: when jumpif decides that the condition is
2384      trivial it emits an unconditional jump (and the necessary
2385      barrier).  But we still have two edges, the fallthru one is
2386      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2387      we have to insert insns (and split edges) before
2388      find_many_sub_basic_blocks and hence before purge_dead_edges.
2389      But splitting edges might create new blocks which depend on the
2390      fact that if there are two edges there's no barrier.  So the
2391      barrier would get lost and verify_flow_info would ICE.  Instead
2392      of auditing all edge splitters to care for the barrier (which
2393      normally isn't there in a cleaned CFG), fix it here.  */
2394   if (BARRIER_P (get_last_insn ()))
2395     {
2396       rtx_insn *insn;
2397       remove_edge (e);
2398       /* Now we have a single successor block.  If we have insns to
2399 	 insert on the remaining edge, we will potentially insert
2400 	 them at the end of this block (if the dest block isn't feasible)
2401 	 in order to avoid splitting the edge.  This insertion will take
2402 	 place in front of the last jump.  But we might have emitted
2403 	 multiple jumps (conditional and one unconditional) to the
2404 	 same destination.  Inserting in front of the last one then
2405 	 is a problem.  See PR 40021.  We fix this by deleting all
2406 	 jumps except the last unconditional one.  */
2407       insn = PREV_INSN (get_last_insn ());
2408       /* Make sure we have an unconditional jump.  Otherwise we're
2409 	 confused.  */
2410       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2411       for (insn = PREV_INSN (insn); insn != last;)
2412 	{
2413 	  insn = PREV_INSN (insn);
2414 	  if (JUMP_P (NEXT_INSN (insn)))
2415 	    {
2416 	      if (!any_condjump_p (NEXT_INSN (insn)))
2417 		{
2418 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2419 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2420 		}
2421 	      delete_insn (NEXT_INSN (insn));
2422 	    }
2423 	}
2424     }
2425 }
2426 
2427 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2428    Returns a new basic block if we've terminated the current basic
2429    block and created a new one.  */
2430 
2431 static basic_block
2432 expand_gimple_cond (basic_block bb, gcond *stmt)
2433 {
2434   basic_block new_bb, dest;
2435   edge true_edge;
2436   edge false_edge;
2437   rtx_insn *last2, *last;
2438   enum tree_code code;
2439   tree op0, op1;
2440 
2441   code = gimple_cond_code (stmt);
2442   op0 = gimple_cond_lhs (stmt);
2443   op1 = gimple_cond_rhs (stmt);
2444   /* We're sometimes presented with such code:
2445        D.123_1 = x < y;
2446        if (D.123_1 != 0)
2447          ...
2448      This would expand to two comparisons which might later
2449      be cleaned up by combine.  But some pattern matchers like if-conversion
2450      work better when there's only one compare, so make up for this
2451      here as a special exception if TER would have made the same change.  */
2452   if (SA.values
2453       && TREE_CODE (op0) == SSA_NAME
2454       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2455       && TREE_CODE (op1) == INTEGER_CST
2456       && ((gimple_cond_code (stmt) == NE_EXPR
2457 	   && integer_zerop (op1))
2458 	  || (gimple_cond_code (stmt) == EQ_EXPR
2459 	      && integer_onep (op1)))
2460       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2461     {
2462       gimple *second = SSA_NAME_DEF_STMT (op0);
2463       if (gimple_code (second) == GIMPLE_ASSIGN)
2464 	{
2465 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2466 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2467 	    {
2468 	      code = code2;
2469 	      op0 = gimple_assign_rhs1 (second);
2470 	      op1 = gimple_assign_rhs2 (second);
2471 	    }
2472 	  /* If jumps are cheap and the target does not support conditional
2473 	     compare, turn some more codes into jumpy sequences.  */
2474 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2475 		   && targetm.gen_ccmp_first == NULL)
2476 	    {
2477 	      if ((code2 == BIT_AND_EXPR
2478 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2479 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2480 		  || code2 == TRUTH_AND_EXPR)
2481 		{
2482 		  code = TRUTH_ANDIF_EXPR;
2483 		  op0 = gimple_assign_rhs1 (second);
2484 		  op1 = gimple_assign_rhs2 (second);
2485 		}
2486 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2487 		{
2488 		  code = TRUTH_ORIF_EXPR;
2489 		  op0 = gimple_assign_rhs1 (second);
2490 		  op1 = gimple_assign_rhs2 (second);
2491 		}
2492 	    }
2493 	}
2494     }
2495 
2496   last2 = last = get_last_insn ();
2497 
2498   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2499   set_curr_insn_location (gimple_location (stmt));
2500 
2501   /* These flags have no purpose in RTL land.  */
2502   true_edge->flags &= ~EDGE_TRUE_VALUE;
2503   false_edge->flags &= ~EDGE_FALSE_VALUE;
2504 
2505   /* We can either have a pure conditional jump with one fallthru edge or
2506      a two-way jump that needs to be decomposed into two basic blocks.  */
2507   if (false_edge->dest == bb->next_bb)
2508     {
2509       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2510 		true_edge->probability);
2511       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2512       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2513 	set_curr_insn_location (true_edge->goto_locus);
2514       false_edge->flags |= EDGE_FALLTHRU;
2515       maybe_cleanup_end_of_block (false_edge, last);
2516       return NULL;
2517     }
2518   if (true_edge->dest == bb->next_bb)
2519     {
2520       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2521 		   false_edge->probability);
2522       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2523       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2524 	set_curr_insn_location (false_edge->goto_locus);
2525       true_edge->flags |= EDGE_FALLTHRU;
2526       maybe_cleanup_end_of_block (true_edge, last);
2527       return NULL;
2528     }
2529 
2530   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 	    true_edge->probability);
2532   last = get_last_insn ();
2533   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2534     set_curr_insn_location (false_edge->goto_locus);
2535   emit_jump (label_rtx_for_bb (false_edge->dest));
2536 
2537   BB_END (bb) = last;
2538   if (BARRIER_P (BB_END (bb)))
2539     BB_END (bb) = PREV_INSN (BB_END (bb));
2540   update_bb_for_insn (bb);
2541 
2542   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2543   dest = false_edge->dest;
2544   redirect_edge_succ (false_edge, new_bb);
2545   false_edge->flags |= EDGE_FALLTHRU;
2546   new_bb->count = false_edge->count ();
2547   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2548   add_bb_to_loop (new_bb, loop);
2549   if (loop->latch == bb
2550       && loop->header == dest)
2551     loop->latch = new_bb;
2552   make_single_succ_edge (new_bb, dest, 0);
2553   if (BARRIER_P (BB_END (new_bb)))
2554     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2555   update_bb_for_insn (new_bb);
2556 
2557   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2558 
2559   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2560     {
2561       set_curr_insn_location (true_edge->goto_locus);
2562       true_edge->goto_locus = curr_insn_location ();
2563     }
2564 
2565   return new_bb;
2566 }
2567 
2568 /* Mark all calls that can have a transaction restart.  */
2569 
2570 static void
2571 mark_transaction_restart_calls (gimple *stmt)
2572 {
2573   struct tm_restart_node dummy;
2574   tm_restart_node **slot;
2575 
2576   if (!cfun->gimple_df->tm_restart)
2577     return;
2578 
2579   dummy.stmt = stmt;
2580   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2581   if (slot)
2582     {
2583       struct tm_restart_node *n = *slot;
2584       tree list = n->label_or_list;
2585       rtx_insn *insn;
2586 
2587       for (insn = next_real_insn (get_last_insn ());
2588 	   !CALL_P (insn);
2589 	   insn = next_real_insn (insn))
2590 	continue;
2591 
2592       if (TREE_CODE (list) == LABEL_DECL)
2593 	add_reg_note (insn, REG_TM, label_rtx (list));
2594       else
2595 	for (; list ; list = TREE_CHAIN (list))
2596 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2597     }
2598 }
2599 
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601    statement STMT.  */
2602 
2603 static void
2604 expand_call_stmt (gcall *stmt)
2605 {
2606   tree exp, decl, lhs;
2607   bool builtin_p;
2608   size_t i;
2609 
2610   if (gimple_call_internal_p (stmt))
2611     {
2612       expand_internal_call (stmt);
2613       return;
2614     }
2615 
2616   /* If this is a call to a built-in function and it has no effect other
2617      than setting the lhs, try to implement it using an internal function
2618      instead.  */
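  /* For instance (an illustrative, target-dependent example): with a used
     lhs and no other side effects, "x = __builtin_sqrt (y)" may be mapped by
     replacement_internal_fn to IFN_SQRT when the target supports it directly,
     and is then expanded as an internal call below.  */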
2619   decl = gimple_call_fndecl (stmt);
2620   if (gimple_call_lhs (stmt)
2621       && !gimple_has_side_effects (stmt)
2622       && (optimize || (decl && called_as_built_in (decl))))
2623     {
2624       internal_fn ifn = replacement_internal_fn (stmt);
2625       if (ifn != IFN_LAST)
2626 	{
2627 	  expand_internal_call (ifn, stmt);
2628 	  return;
2629 	}
2630     }
2631 
2632   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2633 
2634   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2635   builtin_p = decl && DECL_BUILT_IN (decl);
2636 
2637   /* If this is not a builtin function, the function type through which the
2638      call is made may be different from the type of the function.  */
2639   if (!builtin_p)
2640     CALL_EXPR_FN (exp)
2641       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2642 		      CALL_EXPR_FN (exp));
2643 
2644   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2645   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2646 
2647   for (i = 0; i < gimple_call_num_args (stmt); i++)
2648     {
2649       tree arg = gimple_call_arg (stmt, i);
2650       gimple *def;
2651       /* TER addresses into arguments of builtin functions so we have a
2652 	 chance to infer more correct alignment information.  See PR39954.  */
2653       if (builtin_p
2654 	  && TREE_CODE (arg) == SSA_NAME
2655 	  && (def = get_gimple_for_ssa_name (arg))
2656 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2657 	arg = gimple_assign_rhs1 (def);
2658       CALL_EXPR_ARG (exp, i) = arg;
2659     }
2660 
2661   if (gimple_has_side_effects (stmt))
2662     TREE_SIDE_EFFECTS (exp) = 1;
2663 
2664   if (gimple_call_nothrow_p (stmt))
2665     TREE_NOTHROW (exp) = 1;
2666 
2667   if (gimple_no_warning_p (stmt))
2668     TREE_NO_WARNING (exp) = 1;
2669 
2670   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2671   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2672   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2673   if (decl
2674       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2675       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2676     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2677   else
2678     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2679   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2680   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2681   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2682   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2683 
2684   /* Ensure RTL is created for debug args.  */
2685   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2686     {
2687       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2688       unsigned int ix;
2689       tree dtemp;
2690 
2691       if (debug_args)
2692 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2693 	  {
2694 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2695 	    expand_debug_expr (dtemp);
2696 	  }
2697     }
2698 
2699   rtx_insn *before_call = get_last_insn ();
2700   lhs = gimple_call_lhs (stmt);
2701   if (lhs)
2702     expand_assignment (lhs, exp, false);
2703   else
2704     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2705 
2706   /* If the gimple call is an indirect call and has the 'nocf_check'
2707      attribute, find the generated CALL insn and mark it as needing no
2708      control-flow verification.  */
2709   if (gimple_call_nocf_check_p (stmt)
2710       && !gimple_call_fndecl (stmt))
2711     {
2712       rtx_insn *last = get_last_insn ();
2713       while (!CALL_P (last)
2714 	     && last != before_call)
2715 	last = PREV_INSN (last);
2716 
2717       if (last != before_call)
2718 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2719     }
2720 
2721   mark_transaction_restart_calls (stmt);
2722 }
2723 
2724 
2725 /* Generate RTL for an asm statement (explicit assembler code).
2726    STRING is a STRING_CST node containing the assembler code text,
2727    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2728    insn is volatile; don't optimize it.  */
2729 
2730 static void
2731 expand_asm_loc (tree string, int vol, location_t locus)
2732 {
2733   rtx body;
2734 
2735   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2736 				ggc_strdup (TREE_STRING_POINTER (string)),
2737 				locus);
2738 
2739   MEM_VOLATILE_P (body) = vol;
2740 
2741   /* Non-empty basic ASM implicitly clobbers memory.  */
2742   if (TREE_STRING_LENGTH (string) != 0)
2743     {
2744       rtx asm_op, clob;
2745       unsigned i, nclobbers;
2746       auto_vec<rtx> input_rvec, output_rvec;
2747       auto_vec<const char *> constraints;
2748       auto_vec<rtx> clobber_rvec;
2749       HARD_REG_SET clobbered_regs;
2750       CLEAR_HARD_REG_SET (clobbered_regs);
2751 
2752       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2753       clobber_rvec.safe_push (clob);
2754 
2755       if (targetm.md_asm_adjust)
2756 	targetm.md_asm_adjust (output_rvec, input_rvec,
2757 			       constraints, clobber_rvec,
2758 			       clobbered_regs);
2759 
2760       asm_op = body;
2761       nclobbers = clobber_rvec.length ();
2762       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2763 
2764       XVECEXP (body, 0, 0) = asm_op;
2765       for (i = 0; i < nclobbers; i++)
2766 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2767     }
2768 
2769   emit_insn (body);
2770 }
2771 
2772 /* Return the number of times character C occurs in string S.  */
2773 static int
2774 n_occurrences (int c, const char *s)
2775 {
2776   int n = 0;
2777   while (*s)
2778     n += (*s++ == c);
2779   return n;
2780 }
2781 
2782 /* A subroutine of expand_asm_operands.  Check that all operands have
2783    the same number of alternatives.  Return true if so.  */
2784 
2785 static bool
2786 check_operand_nalternatives (const vec<const char *> &constraints)
2787 {
2788   unsigned len = constraints.length();
2789   if (len > 0)
2790     {
2791       int nalternatives = n_occurrences (',', constraints[0]);
2792 
2793       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2794 	{
2795 	  error ("too many alternatives in %<asm%>");
2796 	  return false;
2797 	}
2798 
2799       for (unsigned i = 1; i < len; ++i)
2800 	if (n_occurrences (',', constraints[i]) != nalternatives)
2801 	  {
2802 	    error ("operand constraints for %<asm%> differ "
2803 		   "in number of alternatives");
2804 	    return false;
2805 	  }
2806     }
2807   return true;
2808 }
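
/* For example (illustrative): constraints "=r,m" and "r,r" each describe two
   alternatives (one comma apiece) and pass this check, whereas mixing "=r,m"
   with a plain "r" would trigger the error above.  */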
2809 
2810 /* Check for overlap between registers marked in CLOBBERED_REGS and
2811    anything inappropriate in T.  Emit an error and return true for a
2812    conflict, false for ok.  */
2813 
2814 static bool
2815 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2816 {
2817   /* Conflicts between asm-declared register variables and the clobber
2818      list are not allowed.  */
2819   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2820 
2821   if (overlap)
2822     {
2823       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2824 	     DECL_NAME (overlap));
2825 
2826       /* Reset registerness to stop multiple errors emitted for a single
2827 	 variable.  */
2828       DECL_REGISTER (overlap) = 0;
2829       return true;
2830     }
2831 
2832   return false;
2833 }
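
/* A hypothetical user-level example of the conflict diagnosed above, on a
   target where "regname" is a valid hard register name:

     register int r asm ("regname");
     asm ("..." : : "r" (r) : "regname");

   The asm-declared register overlaps the clobber list, so the error is
   emitted and DECL_REGISTER is cleared to avoid duplicate diagnostics.  */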
2834 
2835 /* Generate RTL for an asm statement with arguments.
2836    STRING is the instruction template.
2837    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2838    Each output or input has an expression in the TREE_VALUE and
2839    a tree list in TREE_PURPOSE which in turn contains a constraint
2840    name in TREE_VALUE (or NULL_TREE) and a constraint string
2841    in TREE_PURPOSE.
2842    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2843    that is clobbered by this insn.
2844 
2845    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2846    should be the fallthru basic block of the asm goto.
2847 
2848    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2849    Some elements of OUTPUTS may be replaced with trees representing temporary
2850    values.  The caller should copy those temporary values to the originally
2851    specified lvalues.
2852 
2853    VOL nonzero means the insn is volatile; don't optimize it.  */
2854 
2855 static void
2856 expand_asm_stmt (gasm *stmt)
2857 {
2858   class save_input_location
2859   {
2860     location_t old;
2861 
2862   public:
2863     explicit save_input_location(location_t where)
2864     {
2865       old = input_location;
2866       input_location = where;
2867     }
2868 
2869     ~save_input_location()
2870     {
2871       input_location = old;
2872     }
2873   };
2874 
2875   location_t locus = gimple_location (stmt);
2876 
2877   if (gimple_asm_input_p (stmt))
2878     {
2879       const char *s = gimple_asm_string (stmt);
2880       tree string = build_string (strlen (s), s);
2881       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2882       return;
2883     }
2884 
2885   /* There are some legacy diagnostics in here, and this also avoids a
2886      sixth parameter to targetm.md_asm_adjust.  */
2887   save_input_location s_i_l(locus);
2888 
2889   unsigned noutputs = gimple_asm_noutputs (stmt);
2890   unsigned ninputs = gimple_asm_ninputs (stmt);
2891   unsigned nlabels = gimple_asm_nlabels (stmt);
2892   unsigned i;
2893 
2894   /* ??? Diagnose during gimplification?  */
2895   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2896     {
2897       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2898       return;
2899     }
2900 
2901   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2902   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2903   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2904 
2905   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2906 
2907   output_tvec.safe_grow (noutputs);
2908   input_tvec.safe_grow (ninputs);
2909   constraints.safe_grow (noutputs + ninputs);
2910 
2911   for (i = 0; i < noutputs; ++i)
2912     {
2913       tree t = gimple_asm_output_op (stmt, i);
2914       output_tvec[i] = TREE_VALUE (t);
2915       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2916     }
2917   for (i = 0; i < ninputs; i++)
2918     {
2919       tree t = gimple_asm_input_op (stmt, i);
2920       input_tvec[i] = TREE_VALUE (t);
2921       constraints[i + noutputs]
2922 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2923     }
2924 
2925   /* ??? Diagnose during gimplification?  */
2926   if (! check_operand_nalternatives (constraints))
2927     return;
2928 
2929   /* Count the number of meaningful clobbered registers, ignoring what
2930      we would ignore later.  */
2931   auto_vec<rtx> clobber_rvec;
2932   HARD_REG_SET clobbered_regs;
2933   CLEAR_HARD_REG_SET (clobbered_regs);
2934 
2935   if (unsigned n = gimple_asm_nclobbers (stmt))
2936     {
2937       clobber_rvec.reserve (n);
2938       for (i = 0; i < n; i++)
2939 	{
2940 	  tree t = gimple_asm_clobber_op (stmt, i);
2941           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2942 	  int nregs, j;
2943 
2944 	  j = decode_reg_name_and_count (regname, &nregs);
2945 	  if (j < 0)
2946 	    {
2947 	      if (j == -2)
2948 		{
2949 		  /* ??? Diagnose during gimplification?  */
2950 		  error ("unknown register name %qs in %<asm%>", regname);
2951 		}
2952 	      else if (j == -4)
2953 		{
2954 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2955 		  clobber_rvec.safe_push (x);
2956 		}
2957 	      else
2958 		{
2959 		  /* Otherwise we should have -1 == empty string
2960 		     or -3 == cc, which is not a register.  */
2961 		  gcc_assert (j == -1 || j == -3);
2962 		}
2963 	    }
2964 	  else
2965 	    for (int reg = j; reg < j + nregs; reg++)
2966 	      {
2967 		/* Clobbering the PIC register is an error.  */
2968 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2969 		  {
2970 		    /* ??? Diagnose during gimplification?  */
2971 		    error ("PIC register clobbered by %qs in %<asm%>",
2972 			   regname);
2973 		    return;
2974 		  }
2975 
2976 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2977 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2978 		clobber_rvec.safe_push (x);
2979 	      }
2980 	}
2981     }
2982   unsigned nclobbers = clobber_rvec.length();
2983 
2984   /* First pass over inputs and outputs checks validity and sets
2985      mark_addressable if needed.  */
2986   /* ??? Diagnose during gimplification?  */
2987 
2988   for (i = 0; i < noutputs; ++i)
2989     {
2990       tree val = output_tvec[i];
2991       tree type = TREE_TYPE (val);
2992       const char *constraint;
2993       bool is_inout;
2994       bool allows_reg;
2995       bool allows_mem;
2996 
2997       /* Try to parse the output constraint.  If that fails, there's
2998 	 no point in going further.  */
2999       constraint = constraints[i];
3000       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3001 				    &allows_mem, &allows_reg, &is_inout))
3002 	return;
3003 
3004       if (! allows_reg
3005 	  && (allows_mem
3006 	      || is_inout
3007 	      || (DECL_P (val)
3008 		  && REG_P (DECL_RTL (val))
3009 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3010 	mark_addressable (val);
3011     }
3012 
3013   for (i = 0; i < ninputs; ++i)
3014     {
3015       bool allows_reg, allows_mem;
3016       const char *constraint;
3017 
3018       constraint = constraints[i + noutputs];
3019       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3020 				    constraints.address (),
3021 				    &allows_mem, &allows_reg))
3022 	return;
3023 
3024       if (! allows_reg && allows_mem)
3025 	mark_addressable (input_tvec[i]);
3026     }
3027 
3028   /* Second pass evaluates arguments.  */
3029 
3030   /* Make sure stack is consistent for asm goto.  */
3031   if (nlabels > 0)
3032     do_pending_stack_adjust ();
3033   int old_generating_concat_p = generating_concat_p;
3034 
3035   /* Vector of RTX's of evaluated output operands.  */
3036   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3037   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3038   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3039 
3040   output_rvec.safe_grow (noutputs);
3041 
3042   for (i = 0; i < noutputs; ++i)
3043     {
3044       tree val = output_tvec[i];
3045       tree type = TREE_TYPE (val);
3046       bool is_inout, allows_reg, allows_mem, ok;
3047       rtx op;
3048 
3049       ok = parse_output_constraint (&constraints[i], i, ninputs,
3050 				    noutputs, &allows_mem, &allows_reg,
3051 				    &is_inout);
3052       gcc_assert (ok);
3053 
3054       /* If an output operand is not a decl or indirect ref and our constraint
3055 	 allows a register, make a temporary to act as an intermediate.
3056 	 Make the asm insn write into that, then we will copy it to
3057 	 the real output operand.  Likewise for promoted variables.  */
3058 
3059       generating_concat_p = 0;
3060 
3061       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3062 	  || (DECL_P (val)
3063 	      && (allows_mem || REG_P (DECL_RTL (val)))
3064 	      && ! (REG_P (DECL_RTL (val))
3065 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3066 	  || ! allows_reg
3067 	  || is_inout
3068 	  || TREE_ADDRESSABLE (type))
3069 	{
3070 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3071 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3072 	  if (MEM_P (op))
3073 	    op = validize_mem (op);
3074 
3075 	  if (! allows_reg && !MEM_P (op))
3076 	    error ("output number %d not directly addressable", i);
3077 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3078 	      || GET_CODE (op) == CONCAT)
3079 	    {
3080 	      rtx old_op = op;
3081 	      op = gen_reg_rtx (GET_MODE (op));
3082 
3083 	      generating_concat_p = old_generating_concat_p;
3084 
3085 	      if (is_inout)
3086 		emit_move_insn (op, old_op);
3087 
3088 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3089 	      emit_move_insn (old_op, op);
3090 	      after_rtl_seq = get_insns ();
3091 	      after_rtl_end = get_last_insn ();
3092 	      end_sequence ();
3093 	    }
3094 	}
3095       else
3096 	{
3097 	  op = assign_temp (type, 0, 1);
3098 	  op = validize_mem (op);
3099 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3100 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3101 
3102 	  generating_concat_p = old_generating_concat_p;
3103 
3104 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3105 	  expand_assignment (val, make_tree (type, op), false);
3106 	  after_rtl_seq = get_insns ();
3107 	  after_rtl_end = get_last_insn ();
3108 	  end_sequence ();
3109 	}
3110       output_rvec[i] = op;
3111 
3112       if (is_inout)
3113 	inout_opnum.safe_push (i);
3114     }
3115 
3116   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3117   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3118 
3119   input_rvec.safe_grow (ninputs);
3120   input_mode.safe_grow (ninputs);
3121 
3122   generating_concat_p = 0;
3123 
3124   for (i = 0; i < ninputs; ++i)
3125     {
3126       tree val = input_tvec[i];
3127       tree type = TREE_TYPE (val);
3128       bool allows_reg, allows_mem, ok;
3129       const char *constraint;
3130       rtx op;
3131 
3132       constraint = constraints[i + noutputs];
3133       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3134 				   constraints.address (),
3135 				   &allows_mem, &allows_reg);
3136       gcc_assert (ok);
3137 
3138       /* EXPAND_INITIALIZER will not generate code for valid initializer
3139 	 constants, but will still generate code for other types of operand.
3140 	 This is the behavior we want for constant constraints.  */
3141       op = expand_expr (val, NULL_RTX, VOIDmode,
3142 			allows_reg ? EXPAND_NORMAL
3143 			: allows_mem ? EXPAND_MEMORY
3144 			: EXPAND_INITIALIZER);
3145 
3146       /* Never pass a CONCAT to an ASM.  */
3147       if (GET_CODE (op) == CONCAT)
3148 	op = force_reg (GET_MODE (op), op);
3149       else if (MEM_P (op))
3150 	op = validize_mem (op);
3151 
3152       if (asm_operand_ok (op, constraint, NULL) <= 0)
3153 	{
3154 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3155 	    op = force_reg (TYPE_MODE (type), op);
3156 	  else if (!allows_mem)
3157 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3158 		     i + noutputs);
3159 	  else if (MEM_P (op))
3160 	    {
3161 	      /* We won't recognize either volatile memory or memory
3162 		 with a queued address as a valid memory_operand
3163 		 at this point.  Ignore it: clearly this *is* a memory.  */
3164 	    }
3165 	  else
3166 	    gcc_unreachable ();
3167 	}
3168       input_rvec[i] = op;
3169       input_mode[i] = TYPE_MODE (type);
3170     }
3171 
3172   /* For in-out operands, copy output rtx to input rtx.  */
3173   unsigned ninout = inout_opnum.length();
3174   for (i = 0; i < ninout; i++)
3175     {
3176       int j = inout_opnum[i];
3177       rtx o = output_rvec[j];
3178 
3179       input_rvec.safe_push (o);
3180       input_mode.safe_push (GET_MODE (o));
3181 
3182       char buffer[16];
3183       sprintf (buffer, "%d", j);
3184       constraints.safe_push (ggc_strdup (buffer));
3185     }
3186   ninputs += ninout;
3187 
3188   /* Sometimes we wish to automatically clobber registers across an asm.
3189      Case in point is when the i386 backend moved from cc0 to a hard reg --
3190      maintaining source-level compatibility means automatically clobbering
3191      the flags register.  */
3192   rtx_insn *after_md_seq = NULL;
3193   if (targetm.md_asm_adjust)
3194     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3195 					  constraints, clobber_rvec,
3196 					  clobbered_regs);
3197 
3198   /* Do not allow the hook to change the output and input count,
3199      lest it mess up the operand numbering.  */
3200   gcc_assert (output_rvec.length() == noutputs);
3201   gcc_assert (input_rvec.length() == ninputs);
3202   gcc_assert (constraints.length() == noutputs + ninputs);
3203 
3204   /* But it certainly can adjust the clobbers.  */
3205   nclobbers = clobber_rvec.length();
3206 
3207   /* Third pass checks for easy conflicts.  */
3208   /* ??? Why are we doing this on trees instead of rtx.  */
3209 
3210   bool clobber_conflict_found = 0;
3211   for (i = 0; i < noutputs; ++i)
3212     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3213 	clobber_conflict_found = 1;
3214   for (i = 0; i < ninputs - ninout; ++i)
3215     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3216 	clobber_conflict_found = 1;
3217 
3218   /* Make vectors for the expression-rtx, constraint strings,
3219      and named operands.  */
3220 
3221   rtvec argvec = rtvec_alloc (ninputs);
3222   rtvec constraintvec = rtvec_alloc (ninputs);
3223   rtvec labelvec = rtvec_alloc (nlabels);
3224 
3225   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3226 				    : GET_MODE (output_rvec[0])),
3227 				   ggc_strdup (gimple_asm_string (stmt)),
3228 				   "", 0, argvec, constraintvec,
3229 				   labelvec, locus);
3230   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3231 
3232   for (i = 0; i < ninputs; ++i)
3233     {
3234       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3235       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3236 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3237 				 constraints[i + noutputs],
3238 				 locus);
3239     }
3240 
3241   /* Copy labels to the vector.  */
3242   rtx_code_label *fallthru_label = NULL;
3243   if (nlabels > 0)
3244     {
3245       basic_block fallthru_bb = NULL;
3246       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3247       if (fallthru)
3248 	fallthru_bb = fallthru->dest;
3249 
3250       for (i = 0; i < nlabels; ++i)
3251 	{
3252 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3253 	  rtx_insn *r;
3254 	  /* If asm goto has any labels in the fallthru basic block, use
3255 	     a label that we emit immediately after the asm goto.  Expansion
3256 	     may insert further instructions into the same basic block after
3257 	     asm goto and if we don't do this, insertion of instructions on
3258 	     the fallthru edge might misbehave.  See PR58670.  */
3259 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3260 	    {
3261 	      if (fallthru_label == NULL_RTX)
3262 	        fallthru_label = gen_label_rtx ();
3263 	      r = fallthru_label;
3264 	    }
3265 	  else
3266 	    r = label_rtx (label);
3267 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3268 	}
3269     }
3270 
3271   /* Now, for each output, construct an rtx
3272      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3273 			       ARGVEC CONSTRAINTS OPNAMES))
3274      If there is more than one, put them inside a PARALLEL.  */
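  /* As a rough illustration (assumed operands, not from any testcase),
     a statement such as

	 asm volatile ("mov %1, %0" : "=r" (a) : "r" (b));

     with no clobbers becomes a single

	 (set (reg a) (asm_operands ("mov %1, %0") ...))

     whereas additional outputs or clobbers wrap the SETs and CLOBBERs
     in a PARALLEL, as built below.  */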
3275 
3276   if (nlabels > 0 && nclobbers == 0)
3277     {
3278       gcc_assert (noutputs == 0);
3279       emit_jump_insn (body);
3280     }
3281   else if (noutputs == 0 && nclobbers == 0)
3282     {
3283       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3284       emit_insn (body);
3285     }
3286   else if (noutputs == 1 && nclobbers == 0)
3287     {
3288       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3289       emit_insn (gen_rtx_SET (output_rvec[0], body));
3290     }
3291   else
3292     {
3293       rtx obody = body;
3294       int num = noutputs;
3295 
3296       if (num == 0)
3297 	num = 1;
3298 
3299       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3300 
3301       /* For each output operand, store a SET.  */
3302       for (i = 0; i < noutputs; ++i)
3303 	{
3304 	  rtx src, o = output_rvec[i];
3305 	  if (i == 0)
3306 	    {
3307 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3308 	      src = obody;
3309 	    }
3310 	  else
3311 	    {
3312 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3313 					  ASM_OPERANDS_TEMPLATE (obody),
3314 					  constraints[i], i, argvec,
3315 					  constraintvec, labelvec, locus);
3316 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3317 	    }
3318 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3319 	}
3320 
3321       /* If there are no outputs (but there are some clobbers)
3322 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3323       if (i == 0)
3324 	XVECEXP (body, 0, i++) = obody;
3325 
3326       /* Store (clobber REG) for each clobbered register specified.  */
3327       for (unsigned j = 0; j < nclobbers; ++j)
3328 	{
3329 	  rtx clobbered_reg = clobber_rvec[j];
3330 
3331 	  /* Sanity-check for any overlap between clobbers and the inputs
3332 	     or outputs that hasn't been handled.  Such overlap should
3333 	     have been detected and reported above.  */
3334 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3335 	    {
3336 	      /* We test the old body (obody) contents to avoid
3337 		 tripping over the under-construction body.  */
3338 	      for (unsigned k = 0; k < noutputs; ++k)
3339 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3340 		  internal_error ("asm clobber conflict with output operand");
3341 
3342 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3343 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3344 		  internal_error ("asm clobber conflict with input operand");
3345 	    }
3346 
3347 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3348 	}
3349 
3350       if (nlabels > 0)
3351 	emit_jump_insn (body);
3352       else
3353 	emit_insn (body);
3354     }
3355 
3356   generating_concat_p = old_generating_concat_p;
3357 
3358   if (fallthru_label)
3359     emit_label (fallthru_label);
3360 
3361   if (after_md_seq)
3362     emit_insn (after_md_seq);
3363   if (after_rtl_seq)
3364     emit_insn (after_rtl_seq);
3365 
3366   free_temp_slots ();
3367   crtl->has_asm_statement = 1;
3368 }
3369 
3370 /* Emit code to jump to the address
3371    specified by the pointer expression EXP.  */
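/* For illustration: a GNU C computed goto such as "goto *p;" is what
   normally reaches this point, since its GIMPLE_GOTO destination is the
   pointer expression P rather than a LABEL_DECL (see the GIMPLE_GOTO
   case in expand_gimple_stmt_1 below).  */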
3372 
3373 static void
3374 expand_computed_goto (tree exp)
3375 {
3376   rtx x = expand_normal (exp);
3377 
3378   do_pending_stack_adjust ();
3379   emit_indirect_jump (x);
3380 }
3381 
3382 /* Generate RTL code for a `goto' statement with target label LABEL.
3383    LABEL should be a LABEL_DECL tree node that was or will later be
3384    defined with `expand_label'.  */
3385 
3386 static void
3387 expand_goto (tree label)
3388 {
3389   if (flag_checking)
3390     {
3391       /* Check for a nonlocal goto to a containing function.  Should have
3392 	 gotten translated to __builtin_nonlocal_goto.  */
3393       tree context = decl_function_context (label);
3394       gcc_assert (!context || context == current_function_decl);
3395     }
3396 
3397   emit_jump (jump_target_rtx (label));
3398 }
3399 
3400 /* Output a return with no value.  */
3401 
3402 static void
3403 expand_null_return_1 (void)
3404 {
3405   clear_pending_stack_adjust ();
3406   do_pending_stack_adjust ();
3407   emit_jump (return_label);
3408 }
3409 
3410 /* Generate RTL to return from the current function, with no value.
3411    (That is, we do not do anything about returning any value.)  */
3412 
3413 void
3414 expand_null_return (void)
3415 {
3416   /* If this function was declared to return a value, but we
3417      didn't return one, clobber the return registers so that they
3418      are not propagated live to the rest of the function.  */
3419   clobber_return_register ();
3420 
3421   expand_null_return_1 ();
3422 }
3423 
3424 /* Generate RTL to return from the current function, with value VAL.  */
3425 
3426 static void
3427 expand_value_return (rtx val)
3428 {
3429   /* Copy the value to the return location unless it's already there.  */
3430 
3431   tree decl = DECL_RESULT (current_function_decl);
3432   rtx return_reg = DECL_RTL (decl);
3433   if (return_reg != val)
3434     {
3435       tree funtype = TREE_TYPE (current_function_decl);
3436       tree type = TREE_TYPE (decl);
3437       int unsignedp = TYPE_UNSIGNED (type);
3438       machine_mode old_mode = DECL_MODE (decl);
3439       machine_mode mode;
3440       if (DECL_BY_REFERENCE (decl))
3441         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3442       else
3443         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3444 
3445       if (mode != old_mode)
3446 	val = convert_modes (mode, old_mode, val, unsignedp);
3447 
3448       if (GET_CODE (return_reg) == PARALLEL)
3449 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3450       else
3451 	emit_move_insn (return_reg, val);
3452     }
3453 
3454   expand_null_return_1 ();
3455 }
3456 
3457 /* Generate RTL to evaluate the expression RETVAL and return it
3458    from the current function.  */
3459 
3460 static void
3461 expand_return (tree retval, tree bounds)
3462 {
3463   rtx result_rtl;
3464   rtx val = 0;
3465   tree retval_rhs;
3466   rtx bounds_rtl;
3467 
3468   /* If function wants no value, give it none.  */
3469   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3470     {
3471       expand_normal (retval);
3472       expand_null_return ();
3473       return;
3474     }
3475 
3476   if (retval == error_mark_node)
3477     {
3478       /* Treat this like a return of no value from a function that
3479 	 returns a value.  */
3480       expand_null_return ();
3481       return;
3482     }
3483   else if ((TREE_CODE (retval) == MODIFY_EXPR
3484 	    || TREE_CODE (retval) == INIT_EXPR)
3485 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3486     retval_rhs = TREE_OPERAND (retval, 1);
3487   else
3488     retval_rhs = retval;
3489 
3490   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3491 
3492   /* Put the returned bounds in the right place.  */
3493   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3494   if (bounds_rtl)
3495     {
3496       rtx addr = NULL;
3497       rtx bnd = NULL;
3498 
3499       if (bounds && bounds != error_mark_node)
3500 	{
3501 	  bnd = expand_normal (bounds);
3502 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3503 	}
3504       else if (REG_P (bounds_rtl))
3505 	{
3506 	  if (bounds)
3507 	    bnd = chkp_expand_zero_bounds ();
3508 	  else
3509 	    {
3510 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3511 	      addr = gen_rtx_MEM (Pmode, addr);
3512 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3513 	    }
3514 
3515 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3516 	}
3517       else
3518 	{
3519 	  int n;
3520 
3521 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3522 
3523 	  if (bounds)
3524 	    bnd = chkp_expand_zero_bounds ();
3525 	  else
3526 	    {
3527 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3528 	      addr = gen_rtx_MEM (Pmode, addr);
3529 	    }
3530 
3531 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3532 	    {
3533 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3534 	      if (!bounds)
3535 		{
3536 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3537 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3538 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3539 		}
3540 	      targetm.calls.store_returned_bounds (slot, bnd);
3541 	    }
3542 	}
3543     }
3544   else if (chkp_function_instrumented_p (current_function_decl)
3545 	   && !BOUNDED_P (retval_rhs)
3546 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3547 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3548     {
3549       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3550       addr = gen_rtx_MEM (Pmode, addr);
3551 
3552       gcc_assert (MEM_P (result_rtl));
3553 
3554       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3555     }
3556 
3557   /* If we are returning the RESULT_DECL, then the value has already
3558      been stored into it, so we don't have to do anything special.  */
3559   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3560     expand_value_return (result_rtl);
3561 
3562   /* If the result is an aggregate that is being returned in one (or more)
3563      registers, load the registers here.  */
3564 
3565   else if (retval_rhs != 0
3566 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3567 	   && REG_P (result_rtl))
3568     {
3569       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3570       if (val)
3571 	{
3572 	  /* Use the mode of the result value on the return register.  */
3573 	  PUT_MODE (result_rtl, GET_MODE (val));
3574 	  expand_value_return (val);
3575 	}
3576       else
3577 	expand_null_return ();
3578     }
3579   else if (retval_rhs != 0
3580 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3581 	   && (REG_P (result_rtl)
3582 	       || (GET_CODE (result_rtl) == PARALLEL)))
3583     {
3584       /* Compute the return value into a temporary (usually a pseudo reg).  */
3585       val
3586 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3587       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3588       val = force_not_mem (val);
3589       expand_value_return (val);
3590     }
3591   else
3592     {
3593       /* No hard reg used; calculate value into hard return reg.  */
3594       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3595       expand_value_return (result_rtl);
3596     }
3597 }
3598 
3599 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3600    STMT that doesn't require special handling for outgoing edges.  That
3601    is, no tailcalls and no GIMPLE_COND.  */
3602 
3603 static void
3604 expand_gimple_stmt_1 (gimple *stmt)
3605 {
3606   tree op0;
3607 
3608   set_curr_insn_location (gimple_location (stmt));
3609 
3610   switch (gimple_code (stmt))
3611     {
3612     case GIMPLE_GOTO:
3613       op0 = gimple_goto_dest (stmt);
3614       if (TREE_CODE (op0) == LABEL_DECL)
3615 	expand_goto (op0);
3616       else
3617 	expand_computed_goto (op0);
3618       break;
3619     case GIMPLE_LABEL:
3620       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3621       break;
3622     case GIMPLE_NOP:
3623     case GIMPLE_PREDICT:
3624       break;
3625     case GIMPLE_SWITCH:
3626       {
3627 	gswitch *swtch = as_a <gswitch *> (stmt);
3628 	if (gimple_switch_num_labels (swtch) == 1)
3629 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3630 	else
3631 	  expand_case (swtch);
3632       }
3633       break;
3634     case GIMPLE_ASM:
3635       expand_asm_stmt (as_a <gasm *> (stmt));
3636       break;
3637     case GIMPLE_CALL:
3638       expand_call_stmt (as_a <gcall *> (stmt));
3639       break;
3640 
3641     case GIMPLE_RETURN:
3642       {
3643 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3644 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3645 
3646 	if (op0 && op0 != error_mark_node)
3647 	  {
3648 	    tree result = DECL_RESULT (current_function_decl);
3649 
3650 	    /* Mark that we have a return statement with missing bounds.  */
3651 	    if (!bnd
3652 		&& chkp_function_instrumented_p (cfun->decl)
3653 		&& !DECL_P (op0))
3654 	      bnd = error_mark_node;
3655 
3656 	    /* If we are not returning the current function's RESULT_DECL,
3657 	       build an assignment to it.  */
3658 	    if (op0 != result)
3659 	      {
3660 		/* I believe that a function's RESULT_DECL is unique.  */
3661 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3662 
3663 		/* ??? We'd like to use simply expand_assignment here,
3664 		   but this fails if the value is of BLKmode but the return
3665 		   decl is a register.  expand_return has special handling
3666 		   for this combination, which eventually should move
3667 		   to common code.  See comments there.  Until then, let's
3668 		   build a modify expression :-/  */
3669 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3670 			      result, op0);
3671 	      }
3672 	  }
3673 
3674 	if (!op0)
3675 	  expand_null_return ();
3676 	else
3677 	  expand_return (op0, bnd);
3678       }
3679       break;
3680 
3681     case GIMPLE_ASSIGN:
3682       {
3683 	gassign *assign_stmt = as_a <gassign *> (stmt);
3684 	tree lhs = gimple_assign_lhs (assign_stmt);
3685 
3686 	/* Tree expand used to fiddle with |= and &= of two bitfield
3687 	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
3688 	   of binary assigns must be a gimple reg.  */
3689 
3690 	if (TREE_CODE (lhs) != SSA_NAME
3691 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3692 	       == GIMPLE_SINGLE_RHS)
3693 	  {
3694 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3695 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3696 			== GIMPLE_SINGLE_RHS);
3697 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3698 		/* Do not put locations on possibly shared trees.  */
3699 		&& !is_gimple_min_invariant (rhs))
3700 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3701 	    if (TREE_CLOBBER_P (rhs))
3702 	      /* This is a clobber marking that this LHS is going out
3703 		 of scope.  */
3704 	      ;
3705 	    else
3706 	      expand_assignment (lhs, rhs,
3707 				 gimple_assign_nontemporal_move_p (
3708 				   assign_stmt));
3709 	  }
3710 	else
3711 	  {
3712 	    rtx target, temp;
3713 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3714 	    struct separate_ops ops;
3715 	    bool promoted = false;
3716 
3717 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3718 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3719 	      promoted = true;
3720 
3721 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3722 	    ops.type = TREE_TYPE (lhs);
3723 	    switch (get_gimple_rhs_class (ops.code))
3724 	      {
3725 		case GIMPLE_TERNARY_RHS:
3726 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3727 		  /* Fallthru */
3728 		case GIMPLE_BINARY_RHS:
3729 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3730 		  /* Fallthru */
3731 		case GIMPLE_UNARY_RHS:
3732 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3733 		  break;
3734 		default:
3735 		  gcc_unreachable ();
3736 	      }
3737 	    ops.location = gimple_location (stmt);
3738 
3739 	    /* If we want to use a nontemporal store, force the value
3740 	       into a register first.  If we store into a promoted
3741 	       register, don't expand directly to the target.  */
3742 	    temp = nontemporal || promoted ? NULL_RTX : target;
3743 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3744 				       EXPAND_NORMAL);
3745 
3746 	    if (temp == target)
3747 	      ;
3748 	    else if (promoted)
3749 	      {
3750 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3751 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3752 		   sure that we properly convert it.  */
3753 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3754 		  {
3755 		    temp = convert_modes (GET_MODE (target),
3756 					  TYPE_MODE (ops.type),
3757 					  temp, unsignedp);
3758 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3759 					  GET_MODE (target), temp, unsignedp);
3760 		  }
3761 
3762 		convert_move (SUBREG_REG (target), temp, unsignedp);
3763 	      }
3764 	    else if (nontemporal && emit_storent_insn (target, temp))
3765 	      ;
3766 	    else
3767 	      {
3768 		temp = force_operand (temp, target);
3769 		if (temp != target)
3770 		  emit_move_insn (target, temp);
3771 	      }
3772 	  }
3773       }
3774       break;
3775 
3776     default:
3777       gcc_unreachable ();
3778     }
3779 }
3780 
3781 /* Expand one gimple statement STMT and return the last RTL instruction
3782    before any of the newly generated ones.
3783 
3784    In addition to generating the necessary RTL instructions this also
3785    sets REG_EH_REGION notes if necessary and sets the current source
3786    location for diagnostics.  */
3787 
3788 static rtx_insn *
3789 expand_gimple_stmt (gimple *stmt)
3790 {
3791   location_t saved_location = input_location;
3792   rtx_insn *last = get_last_insn ();
3793   int lp_nr;
3794 
3795   gcc_assert (cfun);
3796 
3797   /* We need to save and restore the current source location so that errors
3798      discovered during expansion are emitted with the right location.  But
3799      it would be better if the diagnostic routines used the source location
3800      embedded in the tree nodes rather than globals.  */
3801   if (gimple_has_location (stmt))
3802     input_location = gimple_location (stmt);
3803 
3804   expand_gimple_stmt_1 (stmt);
3805 
3806   /* Free any temporaries used to evaluate this statement.  */
3807   free_temp_slots ();
3808 
3809   input_location = saved_location;
3810 
3811   /* Mark all insns that may trap.  */
3812   lp_nr = lookup_stmt_eh_lp (stmt);
3813   if (lp_nr)
3814     {
3815       rtx_insn *insn;
3816       for (insn = next_real_insn (last); insn;
3817 	   insn = next_real_insn (insn))
3818 	{
3819 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3820 	      /* If we want exceptions for non-call insns, any
3821 		 may_trap_p instruction may throw.  */
3822 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3823 	      && GET_CODE (PATTERN (insn)) != USE
3824 	      && insn_could_throw_p (insn))
3825 	    make_reg_eh_region_note (insn, 0, lp_nr);
3826 	}
3827     }
3828 
3829   return last;
3830 }
3831 
3832 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3833    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3834    generated a tail call (something that might be denied by the ABI
3835    rules governing the call; see calls.c).
3836 
3837    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3838    can still reach the rest of BB.  The case here is __builtin_sqrt,
3839    where the NaN result goes through the external function (with a
3840    tailcall) and the normal result happens via a sqrt instruction.  */
3841 
3842 static basic_block
3843 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3844 {
3845   rtx_insn *last2, *last;
3846   edge e;
3847   edge_iterator ei;
3848   profile_probability probability;
3849 
3850   last2 = last = expand_gimple_stmt (stmt);
3851 
3852   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3853     if (CALL_P (last) && SIBLING_CALL_P (last))
3854       goto found;
3855 
3856   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3857 
3858   *can_fallthru = true;
3859   return NULL;
3860 
3861  found:
3862   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3863      Any instructions emitted here are about to be deleted.  */
3864   do_pending_stack_adjust ();
3865 
3866   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3867   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3868      EH or abnormal edges, we shouldn't have created a tail call in
3869      the first place.  So it seems to me we should just be removing
3870      all edges here, or redirecting the existing fallthru edge to
3871      the exit block.  */
3872 
3873   probability = profile_probability::never ();
3874 
3875   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3876     {
3877       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3878 	{
3879 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3880 	    e->dest->count -= e->count ();
3881 	  probability += e->probability;
3882 	  remove_edge (e);
3883 	}
3884       else
3885 	ei_next (&ei);
3886     }
3887 
3888   /* This is somewhat ugly: the call_expr expander often emits instructions
3889      after the sibcall (to perform the function return).  These confuse the
3890      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3891   last = NEXT_INSN (last);
3892   gcc_assert (BARRIER_P (last));
3893 
3894   *can_fallthru = false;
3895   while (NEXT_INSN (last))
3896     {
3897       /* For instance, the sqrt builtin expander expands an if with a
3898 	 sibcall in the `then` arm and a label for the `else` arm.  */
3899       if (LABEL_P (NEXT_INSN (last)))
3900 	{
3901 	  *can_fallthru = true;
3902 	  break;
3903 	}
3904       delete_insn (NEXT_INSN (last));
3905     }
3906 
3907   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3908 		 | EDGE_SIBCALL);
3909   e->probability = probability;
3910   BB_END (bb) = last;
3911   update_bb_for_insn (bb);
3912 
3913   if (NEXT_INSN (last))
3914     {
3915       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3916 
3917       last = BB_END (bb);
3918       if (BARRIER_P (last))
3919 	BB_END (bb) = PREV_INSN (last);
3920     }
3921 
3922   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3923 
3924   return bb;
3925 }
3926 
3927 /* Return the difference between the floor and the truncated result of
3928    a signed division by OP1 with remainder MOD.  */
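/* Worked example (assumed values): for -7 / 2, truncation gives -3 with
   MOD = -1; since MOD != 0 and OP1 / MOD = 2 / -1 < 0, the adjustment
   is -1, and -3 + -1 = -4 = floor (-7 / 2).  */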
3929 static rtx
3930 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3931 {
3932   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3933   return gen_rtx_IF_THEN_ELSE
3934     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3935      gen_rtx_IF_THEN_ELSE
3936      (mode, gen_rtx_LT (BImode,
3937 			gen_rtx_DIV (mode, op1, mod),
3938 			const0_rtx),
3939       constm1_rtx, const0_rtx),
3940      const0_rtx);
3941 }
3942 
3943 /* Return the difference between the ceil and the truncated result of
3944    a signed division by OP1 with remainder MOD.  */
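/* Worked example (assumed values): for 7 / 2, truncation gives 3 with
   MOD = 1; since MOD != 0 and OP1 / MOD = 2 / 1 > 0, the adjustment
   is 1, and 3 + 1 = 4 = ceil (7 / 2).  */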
3945 static rtx
3946 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3947 {
3948   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3949   return gen_rtx_IF_THEN_ELSE
3950     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3951      gen_rtx_IF_THEN_ELSE
3952      (mode, gen_rtx_GT (BImode,
3953 			gen_rtx_DIV (mode, op1, mod),
3954 			const0_rtx),
3955       const1_rtx, const0_rtx),
3956      const0_rtx);
3957 }
3958 
3959 /* Return the difference between the ceil and the truncated result of
3960    an unsigned division by OP1 with remainder MOD.  */
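/* Worked example (assumed values): for 7u / 3u, truncation gives 2 with
   MOD = 1 != 0, so the adjustment is 1 and 2 + 1 = 3 = ceil (7 / 3).  */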
3961 static rtx
3962 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3963 {
3964   /* (mod != 0 ? 1 : 0) */
3965   return gen_rtx_IF_THEN_ELSE
3966     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3967      const1_rtx, const0_rtx);
3968 }
3969 
3970 /* Return the difference between the rounded and the truncated result
3971    of a signed division by OP1 with remainder MOD.  Halfway cases are
3972    rounded away from zero, rather than to the nearest even number.  */
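/* Worked example (assumed values): for -7 / 2, truncation gives -3 with
   MOD = -1; abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1 and
   OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and the result is
   -4, i.e. -3.5 rounded away from zero.  */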
3973 static rtx
3974 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3975 {
3976   /* (abs (mod) >= abs (op1) - abs (mod)
3977       ? (op1 / mod > 0 ? 1 : -1)
3978       : 0) */
3979   return gen_rtx_IF_THEN_ELSE
3980     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3981 		       gen_rtx_MINUS (mode,
3982 				      gen_rtx_ABS (mode, op1),
3983 				      gen_rtx_ABS (mode, mod))),
3984      gen_rtx_IF_THEN_ELSE
3985      (mode, gen_rtx_GT (BImode,
3986 			gen_rtx_DIV (mode, op1, mod),
3987 			const0_rtx),
3988       const1_rtx, constm1_rtx),
3989      const0_rtx);
3990 }
3991 
3992 /* Return the difference between the rounded and the truncated result
3993    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3994    are rounded away from zero, rather than to the nearest even
3995    number.  */
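/* Worked example (assumed values): for 7u / 2u, truncation gives 3 with
   MOD = 1 >= OP1 - MOD = 1, so the adjustment is 1 and the rounded
   result is 4.  */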
3996 static rtx
3997 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3998 {
3999   /* (mod >= op1 - mod ? 1 : 0) */
4000   return gen_rtx_IF_THEN_ELSE
4001     (mode, gen_rtx_GE (BImode, mod,
4002 		       gen_rtx_MINUS (mode, op1, mod)),
4003      const1_rtx, const0_rtx);
4004 }
4005 
4006 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4007    any rtl.  */
4008 
4009 static rtx
4010 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4011 			      addr_space_t as)
4012 {
4013 #ifndef POINTERS_EXTEND_UNSIGNED
4014   gcc_assert (mode == Pmode
4015 	      || mode == targetm.addr_space.address_mode (as));
4016   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4017 #else
4018   rtx temp;
4019 
4020   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4021 
4022   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4023     return x;
4024 
4025   /* X must have some form of address mode already.  */
4026   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4027   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4028     x = lowpart_subreg (mode, x, xmode);
4029   else if (POINTERS_EXTEND_UNSIGNED > 0)
4030     x = gen_rtx_ZERO_EXTEND (mode, x);
4031   else if (!POINTERS_EXTEND_UNSIGNED)
4032     x = gen_rtx_SIGN_EXTEND (mode, x);
4033   else
4034     {
4035       switch (GET_CODE (x))
4036 	{
4037 	case SUBREG:
4038 	  if ((SUBREG_PROMOTED_VAR_P (x)
4039 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4040 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4041 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4042 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4043 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4044 	      && GET_MODE (SUBREG_REG (x)) == mode)
4045 	    return SUBREG_REG (x);
4046 	  break;
4047 	case LABEL_REF:
4048 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4049 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4050 	  return temp;
4051 	case SYMBOL_REF:
4052 	  temp = shallow_copy_rtx (x);
4053 	  PUT_MODE (temp, mode);
4054 	  return temp;
4055 	case CONST:
4056 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4057 	  if (temp)
4058 	    temp = gen_rtx_CONST (mode, temp);
4059 	  return temp;
4060 	case PLUS:
4061 	case MINUS:
4062 	  if (CONST_INT_P (XEXP (x, 1)))
4063 	    {
4064 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4065 	      if (temp)
4066 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4067 	    }
4068 	  break;
4069 	default:
4070 	  break;
4071 	}
4072       /* Don't know how to express ptr_extend as an operation in debug info.  */
4073       return NULL;
4074     }
4075 #endif /* POINTERS_EXTEND_UNSIGNED */
4076 
4077   return x;
4078 }
4079 
4080 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4081    by avoid_deep_ter_for_debug.  */
4082 
4083 static hash_map<tree, tree> *deep_ter_debug_map;
4084 
4085 /* Split too deep TER chains for debug stmts using debug temporaries.  */
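/* Rough example (assumed SSA names): if a debug stmt uses _1 and TER would
   substitute _1 = _2 + _3, _2 = _4 * _5, and so on more than six levels
   deep, the substituted tree can grow very large; instead a DEBUG_EXPR_DECL
   is bound to the deep use and referenced from then on.  */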
4086 
4087 static void
4088 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4089 {
4090   use_operand_p use_p;
4091   ssa_op_iter iter;
4092   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4093     {
4094       tree use = USE_FROM_PTR (use_p);
4095       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4096 	continue;
4097       gimple *g = get_gimple_for_ssa_name (use);
4098       if (g == NULL)
4099 	continue;
4100       if (depth > 6 && !stmt_ends_bb_p (g))
4101 	{
4102 	  if (deep_ter_debug_map == NULL)
4103 	    deep_ter_debug_map = new hash_map<tree, tree>;
4104 
4105 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4106 	  if (vexpr != NULL)
4107 	    continue;
4108 	  vexpr = make_node (DEBUG_EXPR_DECL);
4109 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4110 	  DECL_ARTIFICIAL (vexpr) = 1;
4111 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4112 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4113 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4114 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4115 	  avoid_deep_ter_for_debug (def_temp, 0);
4116 	}
4117       else
4118 	avoid_deep_ter_for_debug (g, depth + 1);
4119     }
4120 }
4121 
4122 /* Return an RTX equivalent to the value of the parameter DECL.  */
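/* For instance (assumed x86_64 example): a parameter whose DECL_INCOMING_RTL
   is the hard register %rdi is described as (entry_value (reg:DI di)), so
   that debug info can refer to the value the caller passed even after the
   register has been reused.  */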
4123 
4124 static rtx
4125 expand_debug_parm_decl (tree decl)
4126 {
4127   rtx incoming = DECL_INCOMING_RTL (decl);
4128 
4129   if (incoming
4130       && GET_MODE (incoming) != BLKmode
4131       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4132 	  || (MEM_P (incoming)
4133 	      && REG_P (XEXP (incoming, 0))
4134 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4135     {
4136       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4137 
4138 #ifdef HAVE_window_save
4139       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4140 	 If the target machine has an explicit window save instruction, the
4141 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4142       if (REG_P (incoming)
4143 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4144 	incoming
4145 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4146 				OUTGOING_REGNO (REGNO (incoming)), 0);
4147       else if (MEM_P (incoming))
4148 	{
4149 	  rtx reg = XEXP (incoming, 0);
4150 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4151 	    {
4152 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4153 	      incoming = replace_equiv_address_nv (incoming, reg);
4154 	    }
4155 	  else
4156 	    incoming = copy_rtx (incoming);
4157 	}
4158 #endif
4159 
4160       ENTRY_VALUE_EXP (rtl) = incoming;
4161       return rtl;
4162     }
4163 
4164   if (incoming
4165       && GET_MODE (incoming) != BLKmode
4166       && !TREE_ADDRESSABLE (decl)
4167       && MEM_P (incoming)
4168       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4169 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4170 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4171 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4172     return copy_rtx (incoming);
4173 
4174   return NULL_RTX;
4175 }
4176 
4177 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4178 
4179 static rtx
4180 expand_debug_expr (tree exp)
4181 {
4182   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4183   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4184   machine_mode inner_mode = VOIDmode;
4185   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4186   addr_space_t as;
4187   scalar_int_mode op0_mode, op1_mode, addr_mode;
4188 
4189   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4190     {
4191     case tcc_expression:
4192       switch (TREE_CODE (exp))
4193 	{
4194 	case COND_EXPR:
4195 	case DOT_PROD_EXPR:
4196 	case SAD_EXPR:
4197 	case WIDEN_MULT_PLUS_EXPR:
4198 	case WIDEN_MULT_MINUS_EXPR:
4199 	case FMA_EXPR:
4200 	  goto ternary;
4201 
4202 	case TRUTH_ANDIF_EXPR:
4203 	case TRUTH_ORIF_EXPR:
4204 	case TRUTH_AND_EXPR:
4205 	case TRUTH_OR_EXPR:
4206 	case TRUTH_XOR_EXPR:
4207 	  goto binary;
4208 
4209 	case TRUTH_NOT_EXPR:
4210 	  goto unary;
4211 
4212 	default:
4213 	  break;
4214 	}
4215       break;
4216 
4217     ternary:
4218       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4219       if (!op2)
4220 	return NULL_RTX;
4221       /* Fall through.  */
4222 
4223     binary:
4224     case tcc_binary:
4225       if (mode == BLKmode)
4226 	return NULL_RTX;
4227       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4228       if (!op1)
4229 	return NULL_RTX;
4230       switch (TREE_CODE (exp))
4231 	{
4232 	case LSHIFT_EXPR:
4233 	case RSHIFT_EXPR:
4234 	case LROTATE_EXPR:
4235 	case RROTATE_EXPR:
4236 	case WIDEN_LSHIFT_EXPR:
4237 	  /* Ensure second operand isn't wider than the first one.  */
4238 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4239 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4240 	      && (GET_MODE_UNIT_PRECISION (mode)
4241 		  < GET_MODE_PRECISION (op1_mode)))
4242 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4243 	  break;
4244 	default:
4245 	  break;
4246 	}
4247       /* Fall through.  */
4248 
4249     unary:
4250     case tcc_unary:
4251       if (mode == BLKmode)
4252 	return NULL_RTX;
4253       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4254       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4255       if (!op0)
4256 	return NULL_RTX;
4257       break;
4258 
4259     case tcc_comparison:
4260       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4261       goto binary;
4262 
4263     case tcc_type:
4264     case tcc_statement:
4265       gcc_unreachable ();
4266 
4267     case tcc_constant:
4268     case tcc_exceptional:
4269     case tcc_declaration:
4270     case tcc_reference:
4271     case tcc_vl_exp:
4272       break;
4273     }
4274 
4275   switch (TREE_CODE (exp))
4276     {
4277     case STRING_CST:
4278       if (!lookup_constant_def (exp))
4279 	{
4280 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4281 	      != (size_t) TREE_STRING_LENGTH (exp))
4282 	    return NULL_RTX;
4283 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4284 	  op0 = gen_rtx_MEM (BLKmode, op0);
4285 	  set_mem_attributes (op0, exp, 0);
4286 	  return op0;
4287 	}
4288       /* Fall through.  */
4289 
4290     case INTEGER_CST:
4291     case REAL_CST:
4292     case FIXED_CST:
4293       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4294       return op0;
4295 
4296     case POLY_INT_CST:
4297       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4298 
4299     case COMPLEX_CST:
4300       gcc_assert (COMPLEX_MODE_P (mode));
4301       op0 = expand_debug_expr (TREE_REALPART (exp));
4302       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4303       return gen_rtx_CONCAT (mode, op0, op1);
4304 
4305     case DEBUG_EXPR_DECL:
4306       op0 = DECL_RTL_IF_SET (exp);
4307 
4308       if (op0)
4309 	return op0;
4310 
4311       op0 = gen_rtx_DEBUG_EXPR (mode);
4312       DEBUG_EXPR_TREE_DECL (op0) = exp;
4313       SET_DECL_RTL (exp, op0);
4314 
4315       return op0;
4316 
4317     case VAR_DECL:
4318     case PARM_DECL:
4319     case FUNCTION_DECL:
4320     case LABEL_DECL:
4321     case CONST_DECL:
4322     case RESULT_DECL:
4323       op0 = DECL_RTL_IF_SET (exp);
4324 
4325       /* This decl was probably optimized away.  */
4326       if (!op0
4327 	  /* At least label RTXen are sometimes replaced by
4328 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4329 	     handled by copy_rtx.  */
4330 	  || NOTE_P (op0))
4331 	{
4332 	  if (!VAR_P (exp)
4333 	      || DECL_EXTERNAL (exp)
4334 	      || !TREE_STATIC (exp)
4335 	      || !DECL_NAME (exp)
4336 	      || DECL_HARD_REGISTER (exp)
4337 	      || DECL_IN_CONSTANT_POOL (exp)
4338 	      || mode == VOIDmode)
4339 	    return NULL;
4340 
4341 	  op0 = make_decl_rtl_for_debug (exp);
4342 	  if (!MEM_P (op0)
4343 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4344 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4345 	    return NULL;
4346 	}
4347       else
4348 	op0 = copy_rtx (op0);
4349 
4350       if (GET_MODE (op0) == BLKmode
4351 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4352 	     below would ICE.  While it is likely a FE bug,
4353 	     try to be robust here.  See PR43166.  */
4354 	  || mode == BLKmode
4355 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4356 	{
4357 	  gcc_assert (MEM_P (op0));
4358 	  op0 = adjust_address_nv (op0, mode, 0);
4359 	  return op0;
4360 	}
4361 
4362       /* Fall through.  */
4363 
4364     adjust_mode:
4365     case PAREN_EXPR:
4366     CASE_CONVERT:
4367       {
4368 	inner_mode = GET_MODE (op0);
4369 
4370 	if (mode == inner_mode)
4371 	  return op0;
4372 
4373 	if (inner_mode == VOIDmode)
4374 	  {
4375 	    if (TREE_CODE (exp) == SSA_NAME)
4376 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4377 	    else
4378 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4379 	    if (mode == inner_mode)
4380 	      return op0;
4381 	  }
4382 
4383 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4384 	  {
4385 	    if (GET_MODE_UNIT_BITSIZE (mode)
4386 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4387 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4388 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4389 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4390 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4391 	    else
4392 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4393 	  }
4394 	else if (FLOAT_MODE_P (mode))
4395 	  {
4396 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4397 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4398 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4399 	    else
4400 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4401 	  }
4402 	else if (FLOAT_MODE_P (inner_mode))
4403 	  {
4404 	    if (unsignedp)
4405 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4406 	    else
4407 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4408 	  }
4409 	else if (GET_MODE_UNIT_PRECISION (mode)
4410 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4411 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4412 	else if (GET_MODE_UNIT_PRECISION (mode)
4413 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4414 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4415 	else if (UNARY_CLASS_P (exp)
4416 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4417 		 : unsignedp)
4418 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4419 	else
4420 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4421 
4422 	return op0;
4423       }
4424 
4425     case MEM_REF:
4426       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4427 	{
4428 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4429 				     TREE_OPERAND (exp, 0),
4430 				     TREE_OPERAND (exp, 1));
4431 	  if (newexp)
4432 	    return expand_debug_expr (newexp);
4433 	}
4434       /* FALLTHROUGH */
4435     case INDIRECT_REF:
4436       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4437       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4438       if (!op0)
4439 	return NULL;
4440 
4441       if (TREE_CODE (exp) == MEM_REF)
4442 	{
4443 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4444 	      || (GET_CODE (op0) == PLUS
4445 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4446 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4447 	       Instead just use get_inner_reference.  */
4448 	    goto component_ref;
4449 
4450 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4451 	  if (!op1 || !CONST_INT_P (op1))
4452 	    return NULL;
4453 
4454 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4455 	}
4456 
4457       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4458 
4459       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4460 					  op0, as);
4461       if (op0 == NULL_RTX)
4462 	return NULL;
4463 
4464       op0 = gen_rtx_MEM (mode, op0);
4465       set_mem_attributes (op0, exp, 0);
4466       if (TREE_CODE (exp) == MEM_REF
4467 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4468 	set_mem_expr (op0, NULL_TREE);
4469       set_mem_addr_space (op0, as);
4470 
4471       return op0;
4472 
4473     case TARGET_MEM_REF:
4474       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4475 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4476 	return NULL;
4477 
4478       op0 = expand_debug_expr
4479 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4480       if (!op0)
4481 	return NULL;
4482 
4483       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4484       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4485 					  op0, as);
4486       if (op0 == NULL_RTX)
4487 	return NULL;
4488 
4489       op0 = gen_rtx_MEM (mode, op0);
4490 
4491       set_mem_attributes (op0, exp, 0);
4492       set_mem_addr_space (op0, as);
4493 
4494       return op0;
4495 
4496     component_ref:
4497     case ARRAY_REF:
4498     case ARRAY_RANGE_REF:
4499     case COMPONENT_REF:
4500     case BIT_FIELD_REF:
4501     case REALPART_EXPR:
4502     case IMAGPART_EXPR:
4503     case VIEW_CONVERT_EXPR:
4504       {
4505 	machine_mode mode1;
4506 	poly_int64 bitsize, bitpos;
4507 	tree offset;
4508 	int reversep, volatilep = 0;
4509 	tree tem
4510 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4511 				 &unsignedp, &reversep, &volatilep);
4512 	rtx orig_op0;
4513 
4514 	if (known_eq (bitsize, 0))
4515 	  return NULL;
4516 
4517 	orig_op0 = op0 = expand_debug_expr (tem);
4518 
4519 	if (!op0)
4520 	  return NULL;
4521 
4522 	if (offset)
4523 	  {
4524 	    machine_mode addrmode, offmode;
4525 
4526 	    if (!MEM_P (op0))
4527 	      return NULL;
4528 
4529 	    op0 = XEXP (op0, 0);
4530 	    addrmode = GET_MODE (op0);
4531 	    if (addrmode == VOIDmode)
4532 	      addrmode = Pmode;
4533 
4534 	    op1 = expand_debug_expr (offset);
4535 	    if (!op1)
4536 	      return NULL;
4537 
4538 	    offmode = GET_MODE (op1);
4539 	    if (offmode == VOIDmode)
4540 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4541 
4542 	    if (addrmode != offmode)
4543 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4544 
4545 	    /* Don't use offset_address here, we don't need a
4546 	       recognizable address, and we don't want to generate
4547 	       code.  */
4548 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4549 							  op0, op1));
4550 	  }
4551 
4552 	if (MEM_P (op0))
4553 	  {
4554 	    if (mode1 == VOIDmode)
4555 	      {
4556 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4557 		  return NULL;
4558 		/* Bitfield.  */
4559 		mode1 = smallest_int_mode_for_size (bitsize);
4560 	      }
4561 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4562 	    if (maybe_ne (bytepos, 0))
4563 	      {
4564 		op0 = adjust_address_nv (op0, mode1, bytepos);
4565 		bitpos = num_trailing_bits (bitpos);
4566 	      }
4567 	    else if (known_eq (bitpos, 0)
4568 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4569 	      op0 = adjust_address_nv (op0, mode, 0);
4570 	    else if (GET_MODE (op0) != mode1)
4571 	      op0 = adjust_address_nv (op0, mode1, 0);
4572 	    else
4573 	      op0 = copy_rtx (op0);
4574 	    if (op0 == orig_op0)
4575 	      op0 = shallow_copy_rtx (op0);
4576 	    set_mem_attributes (op0, exp, 0);
4577 	  }
4578 
4579 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4580 	  return op0;
4581 
4582 	if (maybe_lt (bitpos, 0))
4583           return NULL;
4584 
4585 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4586 	  return NULL;
4587 
4588 	poly_int64 bytepos;
4589 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4590 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4591 	  {
4592 	    machine_mode opmode = GET_MODE (op0);
4593 
4594 	    if (opmode == VOIDmode)
4595 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4596 
4597 	    /* This condition may hold if we're expanding the address
4598 	       right past the end of an array that turned out not to
4599 	       be addressable (i.e., the address was only computed in
4600 	       debug stmts).  The gen_subreg below would rightfully
4601 	       crash, and the address doesn't really exist, so just
4602 	       drop it.  */
4603 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4604 	      return NULL;
4605 
4606 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4607 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4608 	  }
4609 
4610 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4611 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4612 				     ? SIGN_EXTRACT
4613 				     : ZERO_EXTRACT, mode,
4614 				     GET_MODE (op0) != VOIDmode
4615 				     ? GET_MODE (op0)
4616 				     : TYPE_MODE (TREE_TYPE (tem)),
4617 				     op0, gen_int_mode (bitsize, word_mode),
4618 				     gen_int_mode (bitpos, word_mode));
4619       }
4620 
4621     case ABS_EXPR:
4622       return simplify_gen_unary (ABS, mode, op0, mode);
4623 
4624     case NEGATE_EXPR:
4625       return simplify_gen_unary (NEG, mode, op0, mode);
4626 
4627     case BIT_NOT_EXPR:
4628       return simplify_gen_unary (NOT, mode, op0, mode);
4629 
4630     case FLOAT_EXPR:
4631       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4632 									 0)))
4633 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4634 				 inner_mode);
4635 
4636     case FIX_TRUNC_EXPR:
4637       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4638 				 inner_mode);
4639 
4640     case POINTER_PLUS_EXPR:
4641       /* For the rare target where pointers are not the same size as
4642 	 size_t, we need to check for mis-matched modes and correct
4643 	 the addend.  */
4644       if (op0 && op1
4645 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4646 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4647 	  && op0_mode != op1_mode)
4648 	{
4649 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4650 	      /* If OP0 is a partial mode, then we must truncate, even
4651 		 if it has the same bitsize as OP1, as GCC's
4652 		 representation of partial modes is opaque.  */
4653 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4654 		  && (GET_MODE_BITSIZE (op0_mode)
4655 		      == GET_MODE_BITSIZE (op1_mode))))
4656 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4657 	  else
4658 	    /* We always sign-extend, regardless of the signedness of
4659 	       the operand, because the operand is always unsigned
4660 	       here even if the original C expression is signed.  */
4661 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4662 	}
4663       /* Fall through.  */
4664     case PLUS_EXPR:
4665       return simplify_gen_binary (PLUS, mode, op0, op1);
4666 
4667     case MINUS_EXPR:
4668     case POINTER_DIFF_EXPR:
4669       return simplify_gen_binary (MINUS, mode, op0, op1);
4670 
4671     case MULT_EXPR:
4672       return simplify_gen_binary (MULT, mode, op0, op1);
4673 
4674     case RDIV_EXPR:
4675     case TRUNC_DIV_EXPR:
4676     case EXACT_DIV_EXPR:
4677       if (unsignedp)
4678 	return simplify_gen_binary (UDIV, mode, op0, op1);
4679       else
4680 	return simplify_gen_binary (DIV, mode, op0, op1);
4681 
4682     case TRUNC_MOD_EXPR:
4683       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4684 
4685     case FLOOR_DIV_EXPR:
4686       if (unsignedp)
4687 	return simplify_gen_binary (UDIV, mode, op0, op1);
4688       else
4689 	{
4690 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4691 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4692 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4693 	  return simplify_gen_binary (PLUS, mode, div, adj);
4694 	}
4695 
4696     case FLOOR_MOD_EXPR:
4697       if (unsignedp)
4698 	return simplify_gen_binary (UMOD, mode, op0, op1);
4699       else
4700 	{
4701 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4702 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4703 	  adj = simplify_gen_unary (NEG, mode,
4704 				    simplify_gen_binary (MULT, mode, adj, op1),
4705 				    mode);
4706 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4707 	}
4708 
4709     case CEIL_DIV_EXPR:
4710       if (unsignedp)
4711 	{
4712 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4713 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4714 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4715 	  return simplify_gen_binary (PLUS, mode, div, adj);
4716 	}
4717       else
4718 	{
4719 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4720 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4721 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4722 	  return simplify_gen_binary (PLUS, mode, div, adj);
4723 	}
4724 
4725     case CEIL_MOD_EXPR:
4726       if (unsignedp)
4727 	{
4728 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4729 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4730 	  adj = simplify_gen_unary (NEG, mode,
4731 				    simplify_gen_binary (MULT, mode, adj, op1),
4732 				    mode);
4733 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4734 	}
4735       else
4736 	{
4737 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4738 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4739 	  adj = simplify_gen_unary (NEG, mode,
4740 				    simplify_gen_binary (MULT, mode, adj, op1),
4741 				    mode);
4742 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4743 	}
4744 
4745     case ROUND_DIV_EXPR:
4746       if (unsignedp)
4747 	{
4748 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4749 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4750 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4751 	  return simplify_gen_binary (PLUS, mode, div, adj);
4752 	}
4753       else
4754 	{
4755 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4756 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4757 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4758 	  return simplify_gen_binary (PLUS, mode, div, adj);
4759 	}
4760 
4761     case ROUND_MOD_EXPR:
4762       if (unsignedp)
4763 	{
4764 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4765 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4766 	  adj = simplify_gen_unary (NEG, mode,
4767 				    simplify_gen_binary (MULT, mode, adj, op1),
4768 				    mode);
4769 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4770 	}
4771       else
4772 	{
4773 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4774 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4775 	  adj = simplify_gen_unary (NEG, mode,
4776 				    simplify_gen_binary (MULT, mode, adj, op1),
4777 				    mode);
4778 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4779 	}
4780 
4781     case LSHIFT_EXPR:
4782       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4783 
4784     case RSHIFT_EXPR:
4785       if (unsignedp)
4786 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4787       else
4788 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4789 
4790     case LROTATE_EXPR:
4791       return simplify_gen_binary (ROTATE, mode, op0, op1);
4792 
4793     case RROTATE_EXPR:
4794       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4795 
4796     case MIN_EXPR:
4797       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4798 
4799     case MAX_EXPR:
4800       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4801 
4802     case BIT_AND_EXPR:
4803     case TRUTH_AND_EXPR:
4804       return simplify_gen_binary (AND, mode, op0, op1);
4805 
4806     case BIT_IOR_EXPR:
4807     case TRUTH_OR_EXPR:
4808       return simplify_gen_binary (IOR, mode, op0, op1);
4809 
4810     case BIT_XOR_EXPR:
4811     case TRUTH_XOR_EXPR:
4812       return simplify_gen_binary (XOR, mode, op0, op1);
4813 
4814     case TRUTH_ANDIF_EXPR:
4815       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4816 
4817     case TRUTH_ORIF_EXPR:
4818       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4819 
4820     case TRUTH_NOT_EXPR:
4821       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4822 
4823     case LT_EXPR:
4824       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4825 				      op0, op1);
4826 
4827     case LE_EXPR:
4828       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4829 				      op0, op1);
4830 
4831     case GT_EXPR:
4832       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4833 				      op0, op1);
4834 
4835     case GE_EXPR:
4836       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4837 				      op0, op1);
4838 
4839     case EQ_EXPR:
4840       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4841 
4842     case NE_EXPR:
4843       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4844 
4845     case UNORDERED_EXPR:
4846       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4847 
4848     case ORDERED_EXPR:
4849       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4850 
4851     case UNLT_EXPR:
4852       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4853 
4854     case UNLE_EXPR:
4855       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4856 
4857     case UNGT_EXPR:
4858       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4859 
4860     case UNGE_EXPR:
4861       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4862 
4863     case UNEQ_EXPR:
4864       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4865 
4866     case LTGT_EXPR:
4867       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4868 
4869     case COND_EXPR:
4870       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4871 
4872     case COMPLEX_EXPR:
4873       gcc_assert (COMPLEX_MODE_P (mode));
4874       if (GET_MODE (op0) == VOIDmode)
4875 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4876       if (GET_MODE (op1) == VOIDmode)
4877 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4878       return gen_rtx_CONCAT (mode, op0, op1);
4879 
4880     case CONJ_EXPR:
4881       if (GET_CODE (op0) == CONCAT)
4882 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4883 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4884 						   XEXP (op0, 1),
4885 						   GET_MODE_INNER (mode)));
4886       else
4887 	{
4888 	  scalar_mode imode = GET_MODE_INNER (mode);
4889 	  rtx re, im;
4890 
4891 	  if (MEM_P (op0))
4892 	    {
4893 	      re = adjust_address_nv (op0, imode, 0);
4894 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4895 	    }
4896 	  else
4897 	    {
4898 	      scalar_int_mode ifmode;
4899 	      scalar_int_mode ihmode;
4900 	      rtx halfsize;
4901 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4902 		  || !int_mode_for_mode (imode).exists (&ihmode))
4903 		return NULL;
4904 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4905 	      re = op0;
4906 	      if (mode != ifmode)
4907 		re = gen_rtx_SUBREG (ifmode, re, 0);
4908 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4909 	      if (imode != ihmode)
4910 		re = gen_rtx_SUBREG (imode, re, 0);
4911 	      im = copy_rtx (op0);
4912 	      if (mode != ifmode)
4913 		im = gen_rtx_SUBREG (ifmode, im, 0);
4914 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4915 	      if (imode != ihmode)
4916 		im = gen_rtx_SUBREG (imode, im, 0);
4917 	    }
4918 	  im = gen_rtx_NEG (imode, im);
4919 	  return gen_rtx_CONCAT (mode, re, im);
4920 	}
4921 
4922     case ADDR_EXPR:
4923       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4924       if (!op0 || !MEM_P (op0))
4925 	{
4926 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4927 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4928 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4929 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4930 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4931 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4932 
4933 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4934 	    {
4935 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4936 	      bool reverse;
4937 	      tree decl
4938 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4939 					   &bitsize, &maxsize, &reverse);
4940 	      if ((VAR_P (decl)
4941 		   || TREE_CODE (decl) == PARM_DECL
4942 		   || TREE_CODE (decl) == RESULT_DECL)
4943 		  && (!TREE_ADDRESSABLE (decl)
4944 		      || target_for_debug_bind (decl))
4945 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4946 		  && known_gt (bitsize, 0)
4947 		  && known_eq (bitsize, maxsize))
4948 		{
4949 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4950 		  return plus_constant (mode, base, byteoffset);
4951 		}
4952 	    }
4953 
4954 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4955 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4956 		 == ADDR_EXPR)
4957 	    {
4958 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4959 						     0));
4960 	      if (op0 != NULL
4961 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4962 		      || (GET_CODE (op0) == PLUS
4963 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4964 			  && CONST_INT_P (XEXP (op0, 1)))))
4965 		{
4966 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4967 							 1));
4968 		  if (!op1 || !CONST_INT_P (op1))
4969 		    return NULL;
4970 
4971 		  return plus_constant (mode, op0, INTVAL (op1));
4972 		}
4973 	    }
4974 
4975 	  return NULL;
4976 	}
4977 
4978       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4979       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4980       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4981 
4982       return op0;
4983 
4984     case VECTOR_CST:
4985       {
4986 	unsigned HOST_WIDE_INT i, nelts;
4987 
4988 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4989 	  return NULL;
4990 
4991 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4992 
4993 	for (i = 0; i < nelts; ++i)
4994 	  {
4995 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4996 	    if (!op1)
4997 	      return NULL;
4998 	    XVECEXP (op0, 0, i) = op1;
4999 	  }
5000 
5001 	return op0;
5002       }
5003 
5004     case CONSTRUCTOR:
5005       if (TREE_CLOBBER_P (exp))
5006 	return NULL;
5007       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5008 	{
5009 	  unsigned i;
5010 	  unsigned HOST_WIDE_INT nelts;
5011 	  tree val;
5012 
5013 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5014 	    goto flag_unsupported;
5015 
5016 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5017 
5018 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5019 	    {
5020 	      op1 = expand_debug_expr (val);
5021 	      if (!op1)
5022 		return NULL;
5023 	      XVECEXP (op0, 0, i) = op1;
5024 	    }
5025 
5026 	  if (i < nelts)
5027 	    {
5028 	      op1 = expand_debug_expr
5029 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5030 
5031 	      if (!op1)
5032 		return NULL;
5033 
5034 	      for (; i < nelts; i++)
5035 		XVECEXP (op0, 0, i) = op1;
5036 	    }
5037 
5038 	  return op0;
5039 	}
5040       else
5041 	goto flag_unsupported;
5042 
5043     case CALL_EXPR:
5044       /* ??? Maybe handle some builtins?  */
5045       return NULL;
5046 
5047     case SSA_NAME:
5048       {
5049 	gimple *g = get_gimple_for_ssa_name (exp);
5050 	if (g)
5051 	  {
5052 	    tree t = NULL_TREE;
5053 	    if (deep_ter_debug_map)
5054 	      {
5055 		tree *slot = deep_ter_debug_map->get (exp);
5056 		if (slot)
5057 		  t = *slot;
5058 	      }
5059 	    if (t == NULL_TREE)
5060 	      t = gimple_assign_rhs_to_tree (g);
5061 	    op0 = expand_debug_expr (t);
5062 	    if (!op0)
5063 	      return NULL;
5064 	  }
5065 	else
5066 	  {
5067 	    /* If this is a reference to the incoming value of a
5068 	       parameter that is never used in the code, or whose
5069 	       incoming value is never used in the code, use the
5070 	       PARM_DECL's DECL_RTL if set.  */
5071 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5072 		&& SSA_NAME_VAR (exp)
5073 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5074 		&& has_zero_uses (exp))
5075 	      {
5076 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5077 		if (op0)
5078 		  goto adjust_mode;
5079 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5080 		if (op0)
5081 		  goto adjust_mode;
5082 	      }
5083 
5084 	    int part = var_to_partition (SA.map, exp);
5085 
5086 	    if (part == NO_PARTITION)
5087 	      return NULL;
5088 
5089 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5090 
5091 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5092 	  }
5093 	goto adjust_mode;
5094       }
5095 
5096     case ERROR_MARK:
5097       return NULL;
5098 
5099     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5100     case REALIGN_LOAD_EXPR:
5101     case VEC_COND_EXPR:
5102     case VEC_PACK_FIX_TRUNC_EXPR:
5103     case VEC_PACK_SAT_EXPR:
5104     case VEC_PACK_TRUNC_EXPR:
5105     case VEC_UNPACK_FLOAT_HI_EXPR:
5106     case VEC_UNPACK_FLOAT_LO_EXPR:
5107     case VEC_UNPACK_HI_EXPR:
5108     case VEC_UNPACK_LO_EXPR:
5109     case VEC_WIDEN_MULT_HI_EXPR:
5110     case VEC_WIDEN_MULT_LO_EXPR:
5111     case VEC_WIDEN_MULT_EVEN_EXPR:
5112     case VEC_WIDEN_MULT_ODD_EXPR:
5113     case VEC_WIDEN_LSHIFT_HI_EXPR:
5114     case VEC_WIDEN_LSHIFT_LO_EXPR:
5115     case VEC_PERM_EXPR:
5116     case VEC_DUPLICATE_EXPR:
5117     case VEC_SERIES_EXPR:
5118       return NULL;
5119 
5120     /* Misc codes.  */
5121     case ADDR_SPACE_CONVERT_EXPR:
5122     case FIXED_CONVERT_EXPR:
5123     case OBJ_TYPE_REF:
5124     case WITH_SIZE_EXPR:
5125     case BIT_INSERT_EXPR:
5126       return NULL;
5127 
5128     case DOT_PROD_EXPR:
5129       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5130 	  && SCALAR_INT_MODE_P (mode))
5131 	{
5132 	  op0
5133 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5134 									  0)))
5135 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5136 				  inner_mode);
5137 	  op1
5138 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5139 									  1)))
5140 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5141 				  inner_mode);
5142 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5143 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5144 	}
5145       return NULL;
5146 
5147     case WIDEN_MULT_EXPR:
5148     case WIDEN_MULT_PLUS_EXPR:
5149     case WIDEN_MULT_MINUS_EXPR:
5150       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5151 	  && SCALAR_INT_MODE_P (mode))
5152 	{
5153 	  inner_mode = GET_MODE (op0);
5154 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5155 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5156 	  else
5157 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5158 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5159 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5160 	  else
5161 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5162 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5163 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5164 	    return op0;
5165 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5166 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5167 	  else
5168 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5169 	}
5170       return NULL;
5171 
5172     case MULT_HIGHPART_EXPR:
5173       /* ??? Similar to the above.  */
5174       return NULL;
5175 
5176     case WIDEN_SUM_EXPR:
5177     case WIDEN_LSHIFT_EXPR:
5178       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5179 	  && SCALAR_INT_MODE_P (mode))
5180 	{
5181 	  op0
5182 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5183 									  0)))
5184 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5185 				  inner_mode);
5186 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5187 				      ? ASHIFT : PLUS, mode, op0, op1);
5188 	}
5189       return NULL;
5190 
5191     case FMA_EXPR:
5192       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5193 
5194     default:
5195     flag_unsupported:
5196       if (flag_checking)
5197 	{
5198 	  debug_tree (exp);
5199 	  gcc_unreachable ();
5200 	}
5201       return NULL;
5202     }
5203 }
5204 
5205 /* Return an RTX equivalent to the source bind value of the tree expression
5206    EXP.  */
5207 
5208 static rtx
5209 expand_debug_source_expr (tree exp)
5210 {
5211   rtx op0 = NULL_RTX;
5212   machine_mode mode = VOIDmode, inner_mode;
5213 
5214   switch (TREE_CODE (exp))
5215     {
5216     case PARM_DECL:
5217       {
5218 	mode = DECL_MODE (exp);
5219 	op0 = expand_debug_parm_decl (exp);
5220 	if (op0)
5221 	   break;
5222 	/* See if this isn't an argument that has been completely
5223 	   optimized out.  */
5224 	if (!DECL_RTL_SET_P (exp)
5225 	    && !DECL_INCOMING_RTL (exp)
5226 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5227 	  {
5228 	    tree aexp = DECL_ORIGIN (exp);
5229 	    if (DECL_CONTEXT (aexp)
5230 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5231 	      {
5232 		vec<tree, va_gc> **debug_args;
5233 		unsigned int ix;
5234 		tree ddecl;
5235 		debug_args = decl_debug_args_lookup (current_function_decl);
5236 		if (debug_args != NULL)
5237 		  {
5238 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5239 			 ix += 2)
5240 		      if (ddecl == aexp)
5241 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5242 		  }
5243 	      }
5244 	  }
5245 	break;
5246       }
5247     default:
5248       break;
5249     }
5250 
5251   if (op0 == NULL_RTX)
5252     return NULL_RTX;
5253 
5254   inner_mode = GET_MODE (op0);
5255   if (mode == inner_mode)
5256     return op0;
5257 
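  /* OP0 was expanded in its own mode; convert it to the mode of EXP,
     mirroring the conversions a real expansion would perform.  */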
5258   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5259     {
5260       if (GET_MODE_UNIT_BITSIZE (mode)
5261 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5262 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5263       else if (GET_MODE_UNIT_BITSIZE (mode)
5264 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5265 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5266       else
5267 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5268     }
5269   else if (FLOAT_MODE_P (mode))
5270     gcc_unreachable ();
5271   else if (FLOAT_MODE_P (inner_mode))
5272     {
5273       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5274 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5275       else
5276 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5277     }
5278   else if (GET_MODE_UNIT_PRECISION (mode)
5279 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5280     op0 = lowpart_subreg (mode, op0, inner_mode);
5281   else if (GET_MODE_UNIT_PRECISION (mode)
5282 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5283     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5284   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5285     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5286   else
5287     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5288 
5289   return op0;
5290 }
5291 
5292 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5293    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5294    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
5295 
5296 static void
5297 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5298 {
5299   rtx exp = *exp_p;
5300 
5301   if (exp == NULL_RTX)
5302     return;
5303 
5304   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5305     return;
5306 
5307   if (depth == 4)
5308     {
5309       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5310       rtx dval = make_debug_expr_from_rtl (exp);
5311 
5312       /* Emit a debug bind insn before INSN.  */
5313       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5314 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5315 				       VAR_INIT_STATUS_INITIALIZED);
5316 
5317       emit_debug_insn_before (bind, insn);
5318       *exp_p = dval;
5319       return;
5320     }
5321 
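  /* Otherwise walk every rtx operand (and operand vector) of EXP, one
     nesting level deeper.  */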
5322   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5323   int i, j;
5324   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5325     switch (*format_ptr++)
5326       {
5327       case 'e':
5328 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5329 	break;
5330 
5331       case 'E':
5332       case 'V':
5333 	for (j = 0; j < XVECLEN (exp, i); j++)
5334 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5335 	break;
5336 
5337       default:
5338 	break;
5339       }
5340 }
5341 
5342 /* Expand the _LOCs in debug insns.  We run this after expanding all
5343    regular insns, so that any variables referenced in the function
5344    will have their DECL_RTLs set.  */
5345 
5346 static void
5347 expand_debug_locations (void)
5348 {
5349   rtx_insn *insn;
5350   rtx_insn *last = get_last_insn ();
5351   int save_strict_alias = flag_strict_aliasing;
5352 
5353   /* New alias sets while setting up memory attributes cause
5354      -fcompare-debug failures, even though they don't bring about any
5355      codegen changes.  */
5356   flag_strict_aliasing = 0;
5357 
5358   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5359     if (DEBUG_BIND_INSN_P (insn))
5360       {
5361 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5362 	rtx val;
5363 	rtx_insn *prev_insn, *insn2;
5364 	machine_mode mode;
5365 
5366 	if (value == NULL_TREE)
5367 	  val = NULL_RTX;
5368 	else
5369 	  {
5370 	    if (INSN_VAR_LOCATION_STATUS (insn)
5371 		== VAR_INIT_STATUS_UNINITIALIZED)
5372 	      val = expand_debug_source_expr (value);
5373 	    /* The avoid_deep_ter_for_debug function inserts
5374 	       debug bind stmts after SSA_NAME definition, with the
5375 	       SSA_NAME as the whole bind location.  Temporarily disable
5376 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5377 	       being defined in this DEBUG_INSN.  */
5378 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5379 	      {
5380 		tree *slot = deep_ter_debug_map->get (value);
5381 		if (slot)
5382 		  {
5383 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5384 		      *slot = NULL_TREE;
5385 		    else
5386 		      slot = NULL;
5387 		  }
5388 		val = expand_debug_expr (value);
5389 		if (slot)
5390 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5391 	      }
5392 	    else
5393 	      val = expand_debug_expr (value);
5394 	    gcc_assert (last == get_last_insn ());
5395 	  }
5396 
5397 	if (!val)
5398 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5399 	else
5400 	  {
5401 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5402 
5403 	    gcc_assert (mode == GET_MODE (val)
5404 			|| (GET_MODE (val) == VOIDmode
5405 			    && (CONST_SCALAR_INT_P (val)
5406 				|| GET_CODE (val) == CONST_FIXED
5407 				|| GET_CODE (val) == LABEL_REF)));
5408 	  }
5409 
5410 	INSN_VAR_LOCATION_LOC (insn) = val;
5411 	prev_insn = PREV_INSN (insn);
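	/* Break up overly deep locations in INSN, and in any debug insns
	   that avoid_complex_debug_insns emits in front of it.  */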
5412 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5413 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5414       }
5415 
5416   flag_strict_aliasing = save_strict_alias;
5417 }
5418 
5419 /* Swap the operands of commutative operations so that the more
5420    expensive operand is expanded first.  */
5421 
5422 static void
5423 reorder_operands (basic_block bb)
5424 {
5425   unsigned int *lattice;  /* Hold cost of each statement.  */
5426   unsigned int i = 0, n = 0;
5427   gimple_stmt_iterator gsi;
5428   gimple_seq stmts;
5429   gimple *stmt;
5430   bool swap;
5431   tree op0, op1;
5432   ssa_op_iter iter;
5433   use_operand_p use_p;
5434   gimple *def0, *def1;
5435 
5436   /* Compute cost of each statement using estimate_num_insns.  */
5437   stmts = bb_seq (bb);
5438   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5439     {
5440       stmt = gsi_stmt (gsi);
5441       if (!is_gimple_debug (stmt))
5442         gimple_set_uid (stmt, n++);
5443     }
5444   lattice = XNEWVEC (unsigned int, n);
5445   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5446     {
5447       unsigned cost;
5448       stmt = gsi_stmt (gsi);
5449       if (is_gimple_debug (stmt))
5450 	continue;
5451       cost = estimate_num_insns (stmt, &eni_size_weights);
5452       lattice[i] = cost;
5453       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5454 	{
5455 	  tree use = USE_FROM_PTR (use_p);
5456 	  gimple *def_stmt;
5457 	  if (TREE_CODE (use) != SSA_NAME)
5458 	    continue;
5459 	  def_stmt = get_gimple_for_ssa_name (use);
5460 	  if (!def_stmt)
5461 	    continue;
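	  /* Accumulate the cost of the statement defining this use, so
	     LATTICE reflects the cost of the whole expression tree.  */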
5462 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5463 	}
5464       i++;
5465       if (!is_gimple_assign (stmt)
5466 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5467 	continue;
5468       op0 = gimple_op (stmt, 1);
5469       op1 = gimple_op (stmt, 2);
5470       if (TREE_CODE (op0) != SSA_NAME
5471 	  || TREE_CODE (op1) != SSA_NAME)
5472 	continue;
5473       /* Swap operands if the second one is more expensive.  */
5474       def0 = get_gimple_for_ssa_name (op0);
5475       def1 = get_gimple_for_ssa_name (op1);
5476       if (!def1)
5477 	continue;
5478       swap = false;
5479       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5480 	swap = true;
5481       if (swap)
5482 	{
5483 	  if (dump_file && (dump_flags & TDF_DETAILS))
5484 	    {
5485 	      fprintf (dump_file, "Swap operands in stmt:\n");
5486 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5487 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5488 		       def0 ? lattice[gimple_uid (def0)] : 0,
5489 		       lattice[gimple_uid (def1)]);
5490 	    }
5491 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5492 			     gimple_assign_rhs2_ptr (stmt));
5493 	}
5494     }
5495   XDELETE (lattice);
5496 }
5497 
5498 /* Expand basic block BB from GIMPLE trees to RTL.  */
5499 
5500 static basic_block
5501 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5502 {
5503   gimple_stmt_iterator gsi;
5504   gimple_seq stmts;
5505   gimple *stmt = NULL;
5506   rtx_note *note = NULL;
5507   rtx_insn *last;
5508   edge e;
5509   edge_iterator ei;
5510 
5511   if (dump_file)
5512     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5513 	     bb->index);
5514 
5515   /* Note that since we are now transitioning from GIMPLE to RTL, we
5516      cannot use the gsi_*_bb() routines because they expect the basic
5517      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5518      access the BB sequence directly.  */
5519   if (optimize)
5520     reorder_operands (bb);
5521   stmts = bb_seq (bb);
5522   bb->il.gimple.seq = NULL;
5523   bb->il.gimple.phi_nodes = NULL;
5524   rtl_profile_for_bb (bb);
5525   init_rtl_bb_info (bb);
5526   bb->flags |= BB_RTL;
5527 
5528   /* Remove the RETURN_EXPR if we may fall through to the exit
5529      instead.  */
5530   gsi = gsi_last (stmts);
5531   if (!gsi_end_p (gsi)
5532       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5533     {
5534       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5535 
5536       gcc_assert (single_succ_p (bb));
5537       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5538 
5539       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5540 	  && !gimple_return_retval (ret_stmt))
5541 	{
5542 	  gsi_remove (&gsi, false);
5543 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5544 	}
5545     }
5546 
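  /* If the block starts with a GIMPLE_LABEL, expand it (and any label rtx
     recorded for the block) before the basic block note, so that BB_HEAD
     ends up pointing at a label.  */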
5547   gsi = gsi_start (stmts);
5548   if (!gsi_end_p (gsi))
5549     {
5550       stmt = gsi_stmt (gsi);
5551       if (gimple_code (stmt) != GIMPLE_LABEL)
5552 	stmt = NULL;
5553     }
5554 
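  /* A label rtx may already have been recorded for this block by a
     forward jump from an earlier block; emit it at the block head too.  */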
5555   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5556 
5557   if (stmt || elt)
5558     {
5559       gcc_checking_assert (!note);
5560       last = get_last_insn ();
5561 
5562       if (stmt)
5563 	{
5564 	  expand_gimple_stmt (stmt);
5565 	  gsi_next (&gsi);
5566 	}
5567 
5568       if (elt)
5569 	emit_label (*elt);
5570 
5571       BB_HEAD (bb) = NEXT_INSN (last);
5572       if (NOTE_P (BB_HEAD (bb)))
5573 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5574       gcc_assert (LABEL_P (BB_HEAD (bb)));
5575       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5576 
5577       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5578     }
5579   else
5580     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5581 
5582   if (note)
5583     NOTE_BASIC_BLOCK (note) = bb;
5584 
5585   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5586     {
5587       basic_block new_bb;
5588 
5589       stmt = gsi_stmt (gsi);
5590 
5591       /* If this statement is a non-debug one, and we generate debug
5592 	 insns, then this one might be the last real use of a TERed
5593 	 SSA_NAME, but where there are still some debug uses further
5594 	 down.  Expanding the current SSA name in such further debug
5595 	 uses by their RHS might lead to wrong debug info, as coalescing
5596 	 might make the operands of such RHS be placed into the same
5597 	 pseudo as something else.  Like so:
5598 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5599 	   use(a_1);
5600 	   a_2 = ...
5601            #DEBUG ... => a_1
5602 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5603 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5604 	 the write to a_2 would actually have clobbered the place which
5605 	 formerly held a_0.
5606 
5607 	 So, instead of that, we recognize the situation, and generate
5608 	 debug temporaries at the last real use of TERed SSA names:
5609 	   a_1 = a_0 + 1;
5610            #DEBUG #D1 => a_1
5611 	   use(a_1);
5612 	   a_2 = ...
5613            #DEBUG ... => #D1
5614 	 */
5615       if (MAY_HAVE_DEBUG_BIND_INSNS
5616 	  && SA.values
5617 	  && !is_gimple_debug (stmt))
5618 	{
5619 	  ssa_op_iter iter;
5620 	  tree op;
5621 	  gimple *def;
5622 
5623 	  location_t sloc = curr_insn_location ();
5624 
5625 	  /* Look for SSA names that have their last use here (TERed
5626 	     names always have only one real use).  */
5627 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5628 	    if ((def = get_gimple_for_ssa_name (op)))
5629 	      {
5630 		imm_use_iterator imm_iter;
5631 		use_operand_p use_p;
5632 		bool have_debug_uses = false;
5633 
5634 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5635 		  {
5636 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5637 		      {
5638 			have_debug_uses = true;
5639 			break;
5640 		      }
5641 		  }
5642 
5643 		if (have_debug_uses)
5644 		  {
5645 		    /* OP is a TERed SSA name, with DEF its defining
5646 		       statement, and where OP is used in further debug
5647 		       instructions.  Generate a debug temporary, and
5648 		       replace all uses of OP in debug insns with that
5649 		       temporary.  */
5650 		    gimple *debugstmt;
5651 		    tree value = gimple_assign_rhs_to_tree (def);
5652 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5653 		    rtx val;
5654 		    machine_mode mode;
5655 
5656 		    set_curr_insn_location (gimple_location (def));
5657 
5658 		    DECL_ARTIFICIAL (vexpr) = 1;
5659 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5660 		    if (DECL_P (value))
5661 		      mode = DECL_MODE (value);
5662 		    else
5663 		      mode = TYPE_MODE (TREE_TYPE (value));
5664 		    SET_DECL_MODE (vexpr, mode);
5665 
5666 		    val = gen_rtx_VAR_LOCATION
5667 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5668 
5669 		    emit_debug_insn (val);
5670 
5671 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5672 		      {
5673 			if (!gimple_debug_bind_p (debugstmt))
5674 			  continue;
5675 
5676 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5677 			  SET_USE (use_p, vexpr);
5678 
5679 			update_stmt (debugstmt);
5680 		      }
5681 		  }
5682 	      }
5683 	  set_curr_insn_location (sloc);
5684 	}
5685 
5686       currently_expanding_gimple_stmt = stmt;
5687 
5688       /* Expand this statement, then evaluate the resulting RTL and
5689 	 fixup the CFG accordingly.  */
5690       if (gimple_code (stmt) == GIMPLE_COND)
5691 	{
5692 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5693 	  if (new_bb)
5694 	    return new_bb;
5695 	}
5696       else if (is_gimple_debug (stmt))
5697 	{
5698 	  location_t sloc = curr_insn_location ();
5699 	  gimple_stmt_iterator nsi = gsi;
5700 
5701 	  for (;;)
5702 	    {
5703 	      tree var;
5704 	      tree value = NULL_TREE;
5705 	      rtx val = NULL_RTX;
5706 	      machine_mode mode;
5707 
5708 	      if (!gimple_debug_nonbind_marker_p (stmt))
5709 		{
5710 		  if (gimple_debug_bind_p (stmt))
5711 		    {
5712 		      var = gimple_debug_bind_get_var (stmt);
5713 
5714 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5715 			  && TREE_CODE (var) != LABEL_DECL
5716 			  && !target_for_debug_bind (var))
5717 			goto delink_debug_stmt;
5718 
5719 		      if (DECL_P (var))
5720 			mode = DECL_MODE (var);
5721 		      else
5722 			mode = TYPE_MODE (TREE_TYPE (var));
5723 
5724 		      if (gimple_debug_bind_has_value_p (stmt))
5725 			value = gimple_debug_bind_get_value (stmt);
5726 
5727 		      val = gen_rtx_VAR_LOCATION
5728 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5729 		    }
5730 		  else if (gimple_debug_source_bind_p (stmt))
5731 		    {
5732 		      var = gimple_debug_source_bind_get_var (stmt);
5733 
5734 		      value = gimple_debug_source_bind_get_value (stmt);
5735 
5736 		      mode = DECL_MODE (var);
5737 
5738 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5739 						  VAR_INIT_STATUS_UNINITIALIZED);
5740 		    }
5741 		  else
5742 		    gcc_unreachable ();
5743 		}
5744 	      /* If this function was first compiled with markers
5745 		 enabled, but they're now disabled (e.g. LTO), drop
5746 		 them on the floor.  */
5747 	      else if (gimple_debug_nonbind_marker_p (stmt)
5748 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5749 		goto delink_debug_stmt;
5750 	      else if (gimple_debug_begin_stmt_p (stmt))
5751 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5752 	      else if (gimple_debug_inline_entry_p (stmt))
5753 		{
5754 		  tree block = gimple_block (stmt);
5755 
5756 		  if (block)
5757 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5758 		  else
5759 		    goto delink_debug_stmt;
5760 		}
5761 	      else
5762 		gcc_unreachable ();
5763 
5764 	      last = get_last_insn ();
5765 
5766 	      set_curr_insn_location (gimple_location (stmt));
5767 
5768 	      emit_debug_insn (val);
5769 
5770 	      if (dump_file && (dump_flags & TDF_DETAILS))
5771 		{
5772 		  /* We can't dump the insn with a TREE where an RTX
5773 		     is expected.  */
5774 		  if (GET_CODE (val) == VAR_LOCATION)
5775 		    {
5776 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5777 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5778 		    }
5779 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5780 		  if (GET_CODE (val) == VAR_LOCATION)
5781 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5782 		}
5783 
5784 	    delink_debug_stmt:
5785 	      /* In order not to generate too many debug temporaries,
5786 	         we delink all uses of debug statements we already expanded.
5787 		 Therefore debug statements between definition and real
5788 		 use of TERed SSA names will continue to use the SSA name,
5789 		 and not be replaced with debug temps.  */
5790 	      delink_stmt_imm_use (stmt);
5791 
5792 	      gsi = nsi;
5793 	      gsi_next (&nsi);
5794 	      if (gsi_end_p (nsi))
5795 		break;
5796 	      stmt = gsi_stmt (nsi);
5797 	      if (!is_gimple_debug (stmt))
5798 		break;
5799 	    }
5800 
5801 	  set_curr_insn_location (sloc);
5802 	}
5803       else
5804 	{
5805 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5806 	  if (call_stmt
5807 	      && gimple_call_tail_p (call_stmt)
5808 	      && disable_tail_calls)
5809 	    gimple_call_set_tail (call_stmt, false);
5810 
5811 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5812 	    {
5813 	      bool can_fallthru;
5814 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5815 	      if (new_bb)
5816 		{
5817 		  if (can_fallthru)
5818 		    bb = new_bb;
5819 		  else
5820 		    return new_bb;
5821 		}
5822 	    }
5823 	  else
5824 	    {
5825 	      def_operand_p def_p;
5826 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5827 
5828 	      if (def_p != NULL)
5829 		{
5830 		  /* Ignore this stmt if it is in the list of
5831 		     replaceable expressions.  */
5832 		  if (SA.values
5833 		      && bitmap_bit_p (SA.values,
5834 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5835 		    continue;
5836 		}
5837 	      last = expand_gimple_stmt (stmt);
5838 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5839 	    }
5840 	}
5841     }
5842 
5843   currently_expanding_gimple_stmt = NULL;
5844 
5845   /* Expand implicit goto and convert goto_locus.  */
5846   FOR_EACH_EDGE (e, ei, bb->succs)
5847     {
5848       if (e->goto_locus != UNKNOWN_LOCATION)
5849 	set_curr_insn_location (e->goto_locus);
5850       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5851 	{
5852 	  emit_jump (label_rtx_for_bb (e->dest));
5853 	  e->flags &= ~EDGE_FALLTHRU;
5854 	}
5855     }
5856 
5857   /* Expanded RTL can create a jump in the last instruction of the block.
5858      This might later be assumed to be a jump to the successor and break edge
5859      insertion.  We need to insert a dummy move to prevent this.  PR41440.  */
5860   if (single_succ_p (bb)
5861       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5862       && (last = get_last_insn ())
5863       && (JUMP_P (last)
5864 	  || (DEBUG_INSN_P (last)
5865 	      && JUMP_P (prev_nondebug_insn (last)))))
5866     {
5867       rtx dummy = gen_reg_rtx (SImode);
5868       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5869     }
5870 
5871   do_pending_stack_adjust ();
5872 
5873   /* Find the block tail.  The last insn in the block is the insn
5874      before a barrier and/or table jump insn.  */
5875   last = get_last_insn ();
5876   if (BARRIER_P (last))
5877     last = PREV_INSN (last);
5878   if (JUMP_TABLE_DATA_P (last))
5879     last = PREV_INSN (PREV_INSN (last));
5880   BB_END (bb) = last;
5881 
5882   update_bb_for_insn (bb);
5883 
5884   return bb;
5885 }
5886 
5887 
5888 /* Create a basic block for initialization code.  */
5889 
5890 static basic_block
5891 construct_init_block (void)
5892 {
5893   basic_block init_block, first_block;
5894   edge e = NULL;
5895   int flags;
5896 
5897   /* Multiple entry points not supported yet.  */
5898   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5899   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5900   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5901   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5902   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5903 
5904   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5905 
5906   /* When the entry edge points to the first basic block, we don't need a
5907      jump; otherwise we have to jump to the proper target.  */
5908   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5909     {
5910       tree label = gimple_block_label (e->dest);
5911 
5912       emit_jump (jump_target_rtx (label));
5913       flags = 0;
5914     }
5915   else
5916     flags = EDGE_FALLTHRU;
5917 
5918   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5919 				   get_last_insn (),
5920 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5921   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5922   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5923   if (e)
5924     {
5925       first_block = e->dest;
5926       redirect_edge_succ (e, init_block);
5927       e = make_single_succ_edge (init_block, first_block, flags);
5928     }
5929   else
5930     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5931 			       EDGE_FALLTHRU);
5932 
5933   update_bb_for_insn (init_block);
5934   return init_block;
5935 }
5936 
5937 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5938    found in the block tree.  */
5939 
5940 static void
5941 set_block_levels (tree block, int level)
5942 {
5943   while (block)
5944     {
5945       BLOCK_NUMBER (block) = level;
5946       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5947       block = BLOCK_CHAIN (block);
5948     }
5949 }
5950 
5951 /* Create a block containing landing pads and similar stuff.  */
5952 
5953 static void
5954 construct_exit_block (void)
5955 {
5956   rtx_insn *head = get_last_insn ();
5957   rtx_insn *end;
5958   basic_block exit_block;
5959   edge e, e2;
5960   unsigned ix;
5961   edge_iterator ei;
5962   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5963   rtx_insn *orig_end = BB_END (prev_bb);
5964 
5965   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5966 
5967   /* Make sure the locus is set to the end of the function, so that
5968      epilogue line numbers and warnings are set properly.  */
5969   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5970     input_location = cfun->function_end_locus;
5971 
5972   /* Generate rtl for function exit.  */
5973   expand_function_end ();
5974 
5975   end = get_last_insn ();
5976   if (head == end)
5977     return;
5978   /* While emitting the function end we could move the end of the last basic
5979      block.  */
5980   BB_END (prev_bb) = orig_end;
5981   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5982     head = NEXT_INSN (head);
5983   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5984      bb count accounting will be confused.  Any instructions before that
5985      label are emitted for the case where PREV_BB falls through into the
5986      exit block, so append those instructions to prev_bb in that case.  */
5987   if (NEXT_INSN (head) != return_label)
5988     {
5989       while (NEXT_INSN (head) != return_label)
5990 	{
5991 	  if (!NOTE_P (NEXT_INSN (head)))
5992 	    BB_END (prev_bb) = NEXT_INSN (head);
5993 	  head = NEXT_INSN (head);
5994 	}
5995     }
5996   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5997   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5998   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5999 
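  /* Redirect all non-abnormal edges that currently reach the function's
     exit block to the new exit_block; abnormal edges keep their target.  */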
6000   ix = 0;
6001   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6002     {
6003       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6004       if (!(e->flags & EDGE_ABNORMAL))
6005 	redirect_edge_succ (e, exit_block);
6006       else
6007 	ix++;
6008     }
6009 
6010   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6011 			     EDGE_FALLTHRU);
6012   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6013     if (e2 != e)
6014       {
6015 	exit_block->count -= e2->count ();
6016       }
6017   update_bb_for_insn (exit_block);
6018 }
6019 
6020 /* Helper function for discover_nonconstant_array_refs.
6021    Look for ARRAY_REF nodes with non-constant indexes and mark them
6022    addressable.  */
6023 
6024 static tree
6025 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6026 				   void *data ATTRIBUTE_UNUSED)
6027 {
6028   tree t = *tp;
6029 
6030   if (IS_TYPE_OR_DECL_P (t))
6031     *walk_subtrees = 0;
6032   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6033     {
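      /* Strip handled components with invariant indexes and offsets; if an
	 ARRAY_REF with a non-constant index remains, its base is forced
	 into memory below.  */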
6034       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6035 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6036 	      && (!TREE_OPERAND (t, 2)
6037 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6038 	     || (TREE_CODE (t) == COMPONENT_REF
6039 		 && (!TREE_OPERAND (t,2)
6040 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6041 	     || TREE_CODE (t) == BIT_FIELD_REF
6042 	     || TREE_CODE (t) == REALPART_EXPR
6043 	     || TREE_CODE (t) == IMAGPART_EXPR
6044 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6045 	     || CONVERT_EXPR_P (t))
6046 	t = TREE_OPERAND (t, 0);
6047 
6048       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6049 	{
6050 	  t = get_base_address (t);
6051 	  if (t && DECL_P (t)
6052               && DECL_MODE (t) != BLKmode)
6053 	    TREE_ADDRESSABLE (t) = 1;
6054 	}
6055 
6056       *walk_subtrees = 0;
6057     }
6058 
6059   return NULL_TREE;
6060 }
6061 
6062 /* RTL expansion is not able to compile array references with variable
6063    offsets for arrays stored in a single register.  Discover such
6064    expressions and mark variables as addressable to avoid this
6065    scenario.  */
6066 
6067 static void
6068 discover_nonconstant_array_refs (void)
6069 {
6070   basic_block bb;
6071   gimple_stmt_iterator gsi;
6072 
6073   FOR_EACH_BB_FN (bb, cfun)
6074     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6075       {
6076 	gimple *stmt = gsi_stmt (gsi);
6077 	if (!is_gimple_debug (stmt))
6078 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6079       }
6080 }
6081 
6082 /* This function sets crtl->args.internal_arg_pointer to a virtual
6083    register if DRAP is needed.  The local register allocator will replace
6084    virtual_incoming_args_rtx with the virtual register.  */
6085 
6086 static void
6087 expand_stack_alignment (void)
6088 {
6089   rtx drap_rtx;
6090   unsigned int preferred_stack_boundary;
6091 
6092   if (! SUPPORTS_STACK_ALIGNMENT)
6093     return;
6094 
6095   if (cfun->calls_alloca
6096       || cfun->has_nonlocal_label
6097       || crtl->has_nonlocal_goto)
6098     crtl->need_drap = true;
6099 
6100   /* Call update_stack_boundary here again to update incoming stack
6101      boundary.  It may set incoming stack alignment to a different
6102      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6103      use the minimum incoming stack alignment to check if it is OK
6104      to perform sibcall optimization since sibcall optimization will
6105      only align the outgoing stack to incoming stack boundary.  */
6106   if (targetm.calls.update_stack_boundary)
6107     targetm.calls.update_stack_boundary ();
6108 
6109   /* The incoming stack frame has to be aligned at least at
6110      parm_stack_boundary.  */
6111   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6112 
6113   /* Update crtl->stack_alignment_estimated and use it later to align
6114      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6115      exceptions since callgraph doesn't collect incoming stack alignment
6116      in this case.  */
6117   if (cfun->can_throw_non_call_exceptions
6118       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6119     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6120   else
6121     preferred_stack_boundary = crtl->preferred_stack_boundary;
6122   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6123     crtl->stack_alignment_estimated = preferred_stack_boundary;
6124   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6125     crtl->stack_alignment_needed = preferred_stack_boundary;
6126 
6127   gcc_assert (crtl->stack_alignment_needed
6128 	      <= crtl->stack_alignment_estimated);
6129 
6130   crtl->stack_realign_needed
6131     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6132   crtl->stack_realign_tried = crtl->stack_realign_needed;
6133 
6134   crtl->stack_realign_processed = true;
6135 
6136   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6137      alignment.  */
6138   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6139   drap_rtx = targetm.calls.get_drap_rtx ();
6140 
6141   /* stack_realign_drap and drap_rtx must match.  */
6142   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6143 
6144   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6145   if (drap_rtx != NULL)
6146     {
6147       crtl->args.internal_arg_pointer = drap_rtx;
6148 
6149       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6150          needed. */
6151       fixup_tail_calls ();
6152     }
6153 }
6154 
6155 
6156 static void
6157 expand_main_function (void)
6158 {
6159 #if (defined(INVOKE__main)				\
6160      || (!defined(HAS_INIT_SECTION)			\
6161 	 && !defined(INIT_SECTION_ASM_OP)		\
6162 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6163   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6164 #endif
6165 }
6166 
6167 
6168 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6169    the beginning of a function to be protected.  */
6170 
6171 static void
6172 stack_protect_prologue (void)
6173 {
6174   tree guard_decl = targetm.stack_protect_guard ();
6175   rtx x, y;
6176 
6177   x = expand_normal (crtl->stack_protect_guard);
6178   if (guard_decl)
6179     y = expand_normal (guard_decl);
6180   else
6181     y = const0_rtx;
6182 
6183   /* Allow the target to copy from Y to X without leaking Y into a
6184      register.  */
6185   if (targetm.have_stack_protect_set ())
6186     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6187       {
6188 	emit_insn (insn);
6189 	return;
6190       }
6191 
6192   /* Otherwise do a straight move.  */
6193   emit_move_insn (x, y);
6194 }
6195 
6196 /* Translate the intermediate representation contained in the CFG
6197    from GIMPLE trees to RTL.
6198 
6199    We do conversion per basic block and preserve/update the tree CFG.
6200    This implies we have to do some magic as the CFG can simultaneously
6201    consist of basic blocks containing RTL and GIMPLE trees.  This can
6202    confuse the CFG hooks, so be careful to not manipulate CFG during
6203    the expansion.  */
6204 
6205 namespace {
6206 
6207 const pass_data pass_data_expand =
6208 {
6209   RTL_PASS, /* type */
6210   "expand", /* name */
6211   OPTGROUP_NONE, /* optinfo_flags */
6212   TV_EXPAND, /* tv_id */
6213   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6214     | PROP_gimple_lcx
6215     | PROP_gimple_lvec
6216     | PROP_gimple_lva), /* properties_required */
6217   PROP_rtl, /* properties_provided */
6218   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6219   0, /* todo_flags_start */
6220   0, /* todo_flags_finish */
6221 };
6222 
6223 class pass_expand : public rtl_opt_pass
6224 {
6225 public:
6226   pass_expand (gcc::context *ctxt)
6227     : rtl_opt_pass (pass_data_expand, ctxt)
6228   {}
6229 
6230   /* opt_pass methods: */
6231   virtual unsigned int execute (function *);
6232 
6233 }; // class pass_expand
6234 
6235 unsigned int
6236 pass_expand::execute (function *fun)
6237 {
6238   basic_block bb, init_block;
6239   edge_iterator ei;
6240   edge e;
6241   rtx_insn *var_seq, *var_ret_seq;
6242   unsigned i;
6243 
6244   timevar_push (TV_OUT_OF_SSA);
6245   rewrite_out_of_ssa (&SA);
6246   timevar_pop (TV_OUT_OF_SSA);
6247   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6248 
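  /* Avoid arbitrarily deep TER chains in debug bind stmts; see
     avoid_deep_ter_for_debug and its use in expand_debug_locations.  */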
6249   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6250     {
6251       gimple_stmt_iterator gsi;
6252       FOR_EACH_BB_FN (bb, cfun)
6253 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6254 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6255 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6256     }
6257 
6258   /* Make sure all values used by the optimization passes have sane
6259      defaults.  */
6260   reg_renumber = 0;
6261 
6262   /* Some backends want to know that we are expanding to RTL.  */
6263   currently_expanding_to_rtl = 1;
6264   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6265   free_dominance_info (CDI_DOMINATORS);
6266 
6267   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6268 
6269   if (chkp_function_instrumented_p (current_function_decl))
6270     chkp_reset_rtl_bounds ();
6271 
6272   insn_locations_init ();
6273   if (!DECL_IS_BUILTIN (current_function_decl))
6274     {
6275       /* Eventually, all FEs should explicitly set function_start_locus.  */
6276       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6277 	set_curr_insn_location
6278 	  (DECL_SOURCE_LOCATION (current_function_decl));
6279       else
6280 	set_curr_insn_location (fun->function_start_locus);
6281     }
6282   else
6283     set_curr_insn_location (UNKNOWN_LOCATION);
6284   prologue_location = curr_insn_location ();
6285 
6286 #ifdef INSN_SCHEDULING
6287   init_sched_attrs ();
6288 #endif
6289 
6290   /* Make sure the first insn is a note even if we don't want linenums.
6291      This makes sure the first insn will never be deleted.
6292      Also, final expects a note to appear there.  */
6293   emit_note (NOTE_INSN_DELETED);
6294 
6295   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6296   discover_nonconstant_array_refs ();
6297 
6298   targetm.expand_to_rtl_hook ();
6299   crtl->init_stack_alignment ();
6300   fun->cfg->max_jumptable_ents = 0;
6301 
6302   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6303      of the function section at expansion time to predict the distance of calls.  */
6304   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6305 
6306   /* Expand the variables recorded during gimple lowering.  */
6307   timevar_push (TV_VAR_EXPAND);
6308   start_sequence ();
6309 
6310   var_ret_seq = expand_used_vars ();
6311 
6312   var_seq = get_insns ();
6313   end_sequence ();
6314   timevar_pop (TV_VAR_EXPAND);
6315 
6316   /* Honor stack protection warnings.  */
6317   if (warn_stack_protect)
6318     {
6319       if (fun->calls_alloca)
6320 	warning (OPT_Wstack_protector,
6321 		 "stack protector not protecting local variables: "
6322 		 "variable length buffer");
6323       if (has_short_buffer && !crtl->stack_protect_guard)
6324 	warning (OPT_Wstack_protector,
6325 		 "stack protector not protecting function: "
6326 		 "all local arrays are less than %d bytes long",
6327 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6328     }
6329 
6330   /* Set up parameters and prepare for return, for the function.  */
6331   expand_function_start (current_function_decl);
6332 
6333   /* If we emitted any instructions for setting up the variables,
6334      emit them before the FUNCTION_START note.  */
6335   if (var_seq)
6336     {
6337       emit_insn_before (var_seq, parm_birth_insn);
6338 
6339       /* In expand_function_end we'll insert the alloca save/restore
6340 	 before parm_birth_insn.  We've just inserted an alloca call.
6341 	 Adjust the pointer to match.  */
6342       parm_birth_insn = var_seq;
6343     }
6344 
6345   /* Now propagate the RTL assignment of each partition to the
6346      underlying var of each SSA_NAME.  */
6347   tree name;
6348 
6349   FOR_EACH_SSA_NAME (i, name, cfun)
6350     {
6351       /* We might have generated new SSA names in
6352 	 update_alias_info_with_stack_vars.  They will have a NULL
6353 	 defining statement, and won't be part of the partitioning,
6354 	 so ignore those.  */
6355       if (!SSA_NAME_DEF_STMT (name))
6356 	continue;
6357 
6358       adjust_one_expanded_partition_var (name);
6359     }
6360 
6361   /* Clean up RTL of variables that straddle across multiple
6362      partitions, and check that the rtl of any PARM_DECLs that are not
6363      cleaned up is that of their default defs.  */
6364   FOR_EACH_SSA_NAME (i, name, cfun)
6365     {
6366       int part;
6367 
6368       /* We might have generated new SSA names in
6369 	 update_alias_info_with_stack_vars.  They will have a NULL
6370 	 defining statement, and won't be part of the partitioning,
6371 	 so ignore those.  */
6372       if (!SSA_NAME_DEF_STMT (name))
6373 	continue;
6374       part = var_to_partition (SA.map, name);
6375       if (part == NO_PARTITION)
6376 	continue;
6377 
6378       /* If this decl was marked as living in multiple places, reset
6379 	 this now to NULL.  */
6380       tree var = SSA_NAME_VAR (name);
6381       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6382 	SET_DECL_RTL (var, NULL);
6383       /* Check that the pseudos chosen by assign_parms are those of
6384 	 the corresponding default defs.  */
6385       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6386 	       && (TREE_CODE (var) == PARM_DECL
6387 		   || TREE_CODE (var) == RESULT_DECL))
6388 	{
6389 	  rtx in = DECL_RTL_IF_SET (var);
6390 	  gcc_assert (in);
6391 	  rtx out = SA.partition_to_pseudo[part];
6392 	  gcc_assert (in == out);
6393 
6394 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6395 	     those expected by debug backends for each parm and for
6396 	     the result.  This is particularly important for stabs,
6397 	     whose register elimination from parm's DECL_RTL may cause
6398 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6399 	     attrs.  So, make sure the RTL already has the parm as the
6400 	     EXPR, so that it won't change.  */
6401 	  SET_DECL_RTL (var, NULL_RTX);
6402 	  if (MEM_P (in))
6403 	    set_mem_attributes (in, var, true);
6404 	  SET_DECL_RTL (var, in);
6405 	}
6406     }
6407 
6408   /* If this function is `main', emit a call to `__main'
6409      to run global initializers, etc.  */
6410   if (DECL_NAME (current_function_decl)
6411       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6412       && DECL_FILE_SCOPE_P (current_function_decl))
6413     expand_main_function ();
6414 
6415   /* Initialize the stack_protect_guard field.  This must happen after the
6416      call to __main (if any) so that the external decl is initialized.  */
6417   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6418     stack_protect_prologue ();
6419 
6420   expand_phi_nodes (&SA);
6421 
6422   /* Release any stale SSA redirection data.  */
6423   redirect_edge_var_map_empty ();
6424 
6425   /* Register rtl specific functions for cfg.  */
6426   rtl_register_cfg_hooks ();
6427 
6428   init_block = construct_init_block ();
6429 
6430   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6431      remaining edges later.  */
6432   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6433     e->flags &= ~EDGE_EXECUTABLE;
6434 
6435   /* If the function has too many markers, drop them while expanding.  */
6436   if (cfun->debug_marker_count
6437       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6438     cfun->debug_nonbind_markers = false;
6439 
6440   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6441   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6442 		  next_bb)
6443     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6444 
6445   if (MAY_HAVE_DEBUG_BIND_INSNS)
6446     expand_debug_locations ();
6447 
6448   if (deep_ter_debug_map)
6449     {
6450       delete deep_ter_debug_map;
6451       deep_ter_debug_map = NULL;
6452     }
6453 
6454   /* Free stuff we no longer need after GIMPLE optimizations.  */
6455   free_dominance_info (CDI_DOMINATORS);
6456   free_dominance_info (CDI_POST_DOMINATORS);
6457   delete_tree_cfg_annotations (fun);
6458 
6459   timevar_push (TV_OUT_OF_SSA);
6460   finish_out_of_ssa (&SA);
6461   timevar_pop (TV_OUT_OF_SSA);
6462 
6463   timevar_push (TV_POST_EXPAND);
6464   /* We are no longer in SSA form.  */
6465   fun->gimple_df->in_ssa_p = false;
6466   loops_state_clear (LOOP_CLOSED_SSA);
6467 
6468   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6469      conservatively to true until they are all profile aware.  */
6470   delete lab_rtx_for_bb;
6471   free_histograms (fun);
6472 
6473   construct_exit_block ();
6474   insn_locations_finalize ();
6475 
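  /* Emit any deferred instruction sequence returned by expand_used_vars
     right after the return label (or after the basic block note that
     follows it).  */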
6476   if (var_ret_seq)
6477     {
6478       rtx_insn *after = return_label;
6479       rtx_insn *next = NEXT_INSN (after);
6480       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6481 	after = next;
6482       emit_insn_after (var_ret_seq, after);
6483     }
6484 
6485   /* Zap the tree EH table.  */
6486   set_eh_throw_stmt_table (fun, NULL);
6487 
6488   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6489      split edges which edge insertions might do.  */
6490   rebuild_jump_labels (get_insns ());
6491 
6492   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6493 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6494     {
6495       edge e;
6496       edge_iterator ei;
6497       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6498 	{
6499 	  if (e->insns.r)
6500 	    {
6501 	      rebuild_jump_labels_chain (e->insns.r);
6502 	      /* Put insns after parm birth, but before
6503 		 NOTE_INSNS_FUNCTION_BEG.  */
6504 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6505 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6506 		{
6507 		  rtx_insn *insns = e->insns.r;
6508 		  e->insns.r = NULL;
6509 		  if (NOTE_P (parm_birth_insn)
6510 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6511 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6512 		  else
6513 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6514 		}
6515 	      else
6516 		commit_one_edge_insertion (e);
6517 	    }
6518 	  else
6519 	    ei_next (&ei);
6520 	}
6521     }
6522 
6523   /* We're done expanding trees to RTL.  */
6524   currently_expanding_to_rtl = 0;
6525 
6526   flush_mark_addressable_queue ();
6527 
6528   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6529 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6530     {
6531       edge e;
6532       edge_iterator ei;
6533       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6534 	{
6535 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6536 	  e->flags &= ~EDGE_EXECUTABLE;
6537 
6538 	  /* At the moment not all abnormal edges match the RTL
6539 	     representation.  It is safe to remove them here as
6540 	     find_many_sub_basic_blocks will rediscover them.
6541 	     In the future we should get this fixed properly.  */
6542 	  if ((e->flags & EDGE_ABNORMAL)
6543 	      && !(e->flags & EDGE_SIBCALL))
6544 	    remove_edge (e);
6545 	  else
6546 	    ei_next (&ei);
6547 	}
6548     }
6549 
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fix up REG_EQUIV notes in the prologue if there are tail calls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but are not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

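  /* Verify CFG consistency when internal checking is enabled.  */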
  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
    {
      tree parent;
      for (parent = DECL_CONTEXT (current_function_decl);
	   parent != NULL_TREE;
	   parent = get_containing_scope (parent))
	if (TREE_CODE (parent) == FUNCTION_DECL)
	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
    }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  /* Expansion is used by optimization passes too; set maybe_hot_insn_p back
     to the conservative default (true) until those passes are all profile
     aware.  */
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}