xref: /dragonfly/contrib/gcc-8.0/gcc/cfgexpand.c (revision 58e805e6)
1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79 
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81    cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82    give the same symbol without quotes for an alternative entry point.  You
83    must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* This variable holds information helping the rewriting of SSA trees
89    into RTL.  */
90 struct ssaexpand SA;
91 
92 /* This variable holds the currently expanded gimple statement for purposes
93    of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
95 
96 static rtx expand_debug_expr (tree);
97 
98 static bool defer_stack_allocation (tree, bool);
99 
100 static void record_alignment_for_reg_var (unsigned int);
101 
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103    statement STMT.  */
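/* For example, for a GIMPLE assignment x = a + b (a GIMPLE_BINARY_RHS),
   this rebuilds the GENERIC tree PLUS_EXPR <a, b> with the type of the
   LHS x; the names a, b and x are purely illustrative.  */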
104 
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108   tree t;
109   enum gimple_rhs_class grhs_class;
110 
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112 
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115 		TREE_TYPE (gimple_assign_lhs (stmt)),
116 		gimple_assign_rhs1 (stmt),
117 		gimple_assign_rhs2 (stmt),
118 		gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121 		TREE_TYPE (gimple_assign_lhs (stmt)),
122 		gimple_assign_rhs1 (stmt),
123 		gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126 		TREE_TYPE (gimple_assign_lhs (stmt)),
127 		gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
129     {
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 	   && gimple_location (stmt) != EXPR_LOCATION (t))
134 	  || (gimple_block (stmt)
135 	      && currently_expanding_to_rtl
136 	      && EXPR_P (t)))
137 	t = copy_node (t);
138     }
139   else
140     gcc_unreachable ();
141 
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
144 
145   return t;
146 }
147 
148 
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152 
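/* Shorthand used below: for an SSA_NAME, refer to its underlying
   variable (which may be NULL for anonymous SSA names); for anything
   else, the decl itself.  */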
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
154 
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156    Prefer ignored decls, to simplify debug dumps and to reduce the
157    ambiguity that arises from the same user variable being in multiple
158    partitions (this is less likely for compiler-introduced temps).  */
159 
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163   if (cur == NULL || cur == next)
164     return next;
165 
166   if (DECL_P (cur) && DECL_IGNORED_P (cur))
167     return cur;
168 
169   if (DECL_P (next) && DECL_IGNORED_P (next))
170     return next;
171 
172   return cur;
173 }
174 
175 /* Associate declaration T with storage space X.  If T is not an
176    SSA name this is exactly SET_DECL_RTL; otherwise associate the
177    partition of T with X.  */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181   gcc_checking_assert (!x
182 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 		       || (use_register_for_decl (t)
184 			   ? (REG_P (x)
185 			      || (GET_CODE (x) == CONCAT
186 				  && (REG_P (XEXP (x, 0))
187 				      || SUBREG_P (XEXP (x, 0)))
188 				  && (REG_P (XEXP (x, 1))
189 				      || SUBREG_P (XEXP (x, 1))))
190 			      /* We need to accept PARALLELs for RESULT_DECLs
191 				 because of vector types with BLKmode returned
192 				 in multiple registers, but they are supposed
193 				 to be uncoalesced.  */
194 			      || (GET_CODE (x) == PARALLEL
195 				  && SSAVAR (t)
196 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 				  && (GET_MODE (x) == BLKmode
198 				      || !flag_tree_coalesce_vars)))
199 			   : (MEM_P (x) || x == pc_rtx
200 			      || (GET_CODE (x) == CONCAT
201 				  && MEM_P (XEXP (x, 0))
202 				  && MEM_P (XEXP (x, 1))))));
203   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204      RESULT_DECLs has the expected mode.  For memory, we accept
205      unpromoted modes, since that's what we're likely to get.  For
206      PARM_DECLs and RESULT_DECLs, we'll have been called by
207      set_parm_rtl, which will give us the default def, so we don't
208      have to compute it ourselves.  For RESULT_DECLs, we accept mode
209      mismatches too, as long as we have BLKmode or are not coalescing
210      across variables, so that we don't reject BLKmode PARALLELs or
211      unpromoted REGs.  */
212   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 		       || (SSAVAR (t)
214 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 			   && (promote_ssa_mode (t, NULL) == BLKmode
216 			       || !flag_tree_coalesce_vars))
217 		       || !use_register_for_decl (t)
218 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
219 
220   if (x)
221     {
222       bool skip = false;
223       tree cur = NULL_TREE;
224       rtx xm = x;
225 
226     retry:
227       if (MEM_P (xm))
228 	cur = MEM_EXPR (xm);
229       else if (REG_P (xm))
230 	cur = REG_EXPR (xm);
231       else if (SUBREG_P (xm))
232 	{
233 	  gcc_assert (subreg_lowpart_p (xm));
234 	  xm = SUBREG_REG (xm);
235 	  goto retry;
236 	}
237       else if (GET_CODE (xm) == CONCAT)
238 	{
239 	  xm = XEXP (xm, 0);
240 	  goto retry;
241 	}
242       else if (GET_CODE (xm) == PARALLEL)
243 	{
244 	  xm = XVECEXP (xm, 0, 0);
245 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 	  xm = XEXP (xm, 0);
247 	  goto retry;
248 	}
249       else if (xm == pc_rtx)
250 	skip = true;
251       else
252 	gcc_unreachable ();
253 
254       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255 
256       if (cur != next)
257 	{
258 	  if (MEM_P (x))
259 	    set_mem_attributes (x,
260 				next && TREE_CODE (next) == SSA_NAME
261 				? TREE_TYPE (next)
262 				: next, true);
263 	  else
264 	    set_reg_attrs_for_decl_rtl (next, x);
265 	}
266     }
267 
268   if (TREE_CODE (t) == SSA_NAME)
269     {
270       int part = var_to_partition (SA.map, t);
271       if (part != NO_PARTITION)
272 	{
273 	  if (SA.partition_to_pseudo[part])
274 	    gcc_assert (SA.partition_to_pseudo[part] == x);
275 	  else if (x != pc_rtx)
276 	    SA.partition_to_pseudo[part] = x;
277 	}
278       /* For the benefit of debug information at -O0 (where
279          vartracking doesn't run) record the place also in the base
280          DECL.  For PARMs and RESULTs, do so only when setting the
281          default def.  */
282       if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 	  && (VAR_P (SSA_NAME_VAR (t))
284 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
285 	{
286 	  tree var = SSA_NAME_VAR (t);
287 	  /* If we don't yet have something recorded, just record it now.  */
288 	  if (!DECL_RTL_SET_P (var))
289 	    SET_DECL_RTL (var, x);
290 	  /* If we have it set already to "multiple places" don't
291 	     change this.  */
292 	  else if (DECL_RTL (var) == pc_rtx)
293 	    ;
294 	  /* If we have something recorded and it's not the same place
295 	     as we want to record now, we have multiple partitions for the
296 	     same base variable, with different places.  We can't just
297 	     randomly chose one, hence we have to say that we don't know.
298 	     randomly choose one, hence we have to say that we don't know.
299 	     will figure out the right thing.  */
300 	  else if (DECL_RTL (var) != x)
301 	    SET_DECL_RTL (var, pc_rtx);
302 	}
303     }
304   else
305     SET_DECL_RTL (t, x);
306 }
307 
308 /* This structure holds data relevant to one variable that will be
309    placed in a stack slot.  */
310 struct stack_var
311 {
312   /* The variable.  */
313   tree decl;
314 
315   /* Initially, the size of the variable.  Later, the size of the partition,
316      if this variable becomes its partition's representative.  */
317   poly_uint64 size;
318 
319   /* The *byte* alignment required for this variable.  Or as, with the
320      size, the alignment for this partition.  */
321   unsigned int alignb;
322 
323   /* The partition representative.  */
324   size_t representative;
325 
326   /* The next stack variable in the partition, or EOC.  */
327   size_t next;
328 
329   /* The numbers of conflicting stack variables.  */
330   bitmap conflicts;
331 };
332 
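/* End-of-chain marker for the representative/next links above.  */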
333 #define EOC  ((size_t)-1)
334 
335 /* We have an array of such objects while deciding allocation.  */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
340 
341 /* Conflict bitmaps go on this obstack.  This allows us to destroy
342    all of them in one big sweep.  */
343 static bitmap_obstack stack_var_bitmap_obstack;
344 
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346    is non-decreasing.  */
347 static size_t *stack_vars_sorted;
348 
349 /* The phase of the stack frame.  This is the known misalignment of
350    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
351    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
352 static int frame_phase;
353 
354 /* Used during expand_used_vars to remember if we saw any decls for
355    which we'd like to enable stack smashing protection.  */
356 static bool has_protected_decls;
357 
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
360 static bool has_short_buffer;
361 
362 /* Compute the byte alignment to use for DECL.  Ignore alignment
363    we can't satisfy with the expected alignment of the stack boundary.  */
364 
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368   unsigned int align;
369 
370   if (TREE_CODE (decl) == SSA_NAME)
371     align = TYPE_ALIGN (TREE_TYPE (decl));
372   else
373     {
374       align = LOCAL_DECL_ALIGNMENT (decl);
375       SET_DECL_ALIGN (decl, align);
376     }
377   return align / BITS_PER_UNIT;
378 }
379 
380 /* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
381    down otherwise.  Return truncated BASE value.  */
382 
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
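  /* ALIGN is assumed to be a power of two, so -align is the mask
     ~(align - 1); adding align - 1 before masking rounds up, masking
     alone rounds down.  E.g. align_base (37, 16, true) == 48 and
     align_base (37, 16, false) == 32.  */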
386   return align_up ? (base + align - 1) & -align : base & -align;
387 }
388 
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390    Return the frame offset.  */
391 
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
394 {
395   poly_int64 offset, new_frame_offset;
396 
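  /* Illustrative only: with FRAME_GROWS_DOWNWARD, frame_phase == 0 and
     frame_offset == -16, allocating 8 bytes at alignment 8 moves
     frame_offset to -24 and returns -24.  When the frame grows upward,
     the aligned old offset is returned and frame_offset advances past
     the new object.  */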
397   if (FRAME_GROWS_DOWNWARD)
398     {
399       new_frame_offset
400 	= aligned_lower_bound (frame_offset - frame_phase - size,
401 			       align) + frame_phase;
402       offset = new_frame_offset;
403     }
404   else
405     {
406       new_frame_offset
407 	= aligned_upper_bound (frame_offset - frame_phase,
408 			       align) + frame_phase;
409       offset = new_frame_offset;
410       new_frame_offset += size;
411     }
412   frame_offset = new_frame_offset;
413 
414   if (frame_offset_overflow (frame_offset, cfun->decl))
415     frame_offset = offset = 0;
416 
417   return offset;
418 }
419 
420 /* Accumulate DECL into STACK_VARS.  */
421 
422 static void
423 add_stack_var (tree decl)
424 {
425   struct stack_var *v;
426 
427   if (stack_vars_num >= stack_vars_alloc)
428     {
429       if (stack_vars_alloc)
430 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
431       else
432 	stack_vars_alloc = 32;
433       stack_vars
434 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
435     }
436   if (!decl_to_stack_part)
437     decl_to_stack_part = new hash_map<tree, size_t>;
438 
439   v = &stack_vars[stack_vars_num];
440   decl_to_stack_part->put (decl, stack_vars_num);
441 
442   v->decl = decl;
443   tree size = TREE_CODE (decl) == SSA_NAME
444     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445     : DECL_SIZE_UNIT (decl);
446   v->size = tree_to_poly_uint64 (size);
447   /* Ensure that all variables have size, so that &a != &b for any two
448      variables that are simultaneously live.  */
449   if (known_eq (v->size, 0U))
450     v->size = 1;
451   v->alignb = align_local_variable (decl);
452   /* An alignment of zero can mightily confuse us later.  */
453   gcc_assert (v->alignb != 0);
454 
455   /* All variables are initially in their own partition.  */
456   v->representative = stack_vars_num;
457   v->next = EOC;
458 
459   /* All variables initially conflict with no other.  */
460   v->conflicts = NULL;
461 
462   /* Ensure that this decl doesn't get put onto the list twice.  */
463   set_rtl (decl, pc_rtx);
464 
465   stack_vars_num++;
466 }
467 
468 /* Make the decls associated with luid's X and Y conflict.  */
469 
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
472 {
473   struct stack_var *a = &stack_vars[x];
474   struct stack_var *b = &stack_vars[y];
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with luid's X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
502 
503 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
504    enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
523    record conflicts between it and all currently active other partitions
524    from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This overapproximates in the case where we, for instance, moved an
632      address-taken operation upward without also moving a dereference to it.
633      But it's conservatively correct, as a variable can never hold values
634      before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
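  /* For example, given { char a[64]; use (&a); } { char b[64]; use (&b); },
     each buffer's live range ends at its scope's clobber, so A and B are
     never simultaneously live, no conflict is recorded between them, and
     partition_stack_vars may give them the same stack slot.  */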
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID for sort stability, increasing.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
723 
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PI points to variables that are in a partition
728    together with other variables add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared, it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855    /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
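/* An illustrative case, assuming -fsanitize=address is not in effect:
   given representatives of sizes 64, 32 and 32 where only the two
   32-byte objects conflict with each other, the first 32-byte object is
   merged into the 64-byte partition, which then inherits its conflicts,
   so the second 32-byte object is left in its own partition.  */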
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects; J joins I's partition.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Set the alignment we actually gave this decl if it isn't an SSA name.
999          If it is, we generate stack slots only accidentally, so it isn't as
1000 	 important; we'll simply use the alignment that is already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always end of some padding followed
1022      by start of the padding that needs Address Sanitizer protection.
1023      The vector is in reversed order, highest offset pairs come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process these if
1052      no predicate is involved.  */
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      prev_offset = align_base (prev_offset,
1130 					MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 					!FRAME_GROWS_DOWNWARD);
1132 	      tree repr_decl = NULL_TREE;
1133 	      offset
1134 		= alloc_stack_frame_space (stack_vars[i].size
1135 					   + ASAN_RED_ZONE_SIZE,
1136 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 
1138 	      data->asan_vec.safe_push (prev_offset);
1139 	      /* Allocating a constant amount of space from a constant
1140 		 starting offset must give a constant result.  */
1141 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 					.to_constant ());
1143 	      /* Find the best representative of the partition.
1144 		 Prefer those with a DECL_NAME, and better still those
1145 		 satisfying the asan_protect_stack_decl predicate.  */
1146 	      for (j = i; j != EOC; j = stack_vars[j].next)
1147 		if (asan_protect_stack_decl (stack_vars[j].decl)
1148 		    && DECL_NAME (stack_vars[j].decl))
1149 		  {
1150 		    repr_decl = stack_vars[j].decl;
1151 		    break;
1152 		  }
1153 		else if (repr_decl == NULL_TREE
1154 			 && DECL_P (stack_vars[j].decl)
1155 			 && DECL_NAME (stack_vars[j].decl))
1156 		  repr_decl = stack_vars[j].decl;
1157 	      if (repr_decl == NULL_TREE)
1158 		repr_decl = stack_vars[i].decl;
1159 	      data->asan_decl_vec.safe_push (repr_decl);
1160 
1161 	      /* Make sure a representative is unpoisoned if another
1162 		 variable in the partition is handled by
1163 		 use-after-scope sanitization.  */
1164 	      if (asan_handled_variables != NULL
1165 		  && !asan_handled_variables->contains (repr_decl))
1166 		{
1167 		  for (j = i; j != EOC; j = stack_vars[j].next)
1168 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1169 		      break;
1170 		  if (j != EOC)
1171 		    asan_handled_variables->add (repr_decl);
1172 		}
1173 
1174 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1175 	      if (data->asan_base == NULL)
1176 		data->asan_base = gen_reg_rtx (Pmode);
1177 	      base = data->asan_base;
1178 
1179 	      if (!STRICT_ALIGNMENT)
1180 		base_align = crtl->max_used_stack_slot_alignment;
1181 	      else
1182 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1183 				  GET_MODE_ALIGNMENT (SImode)
1184 				  << ASAN_SHADOW_SHIFT);
1185 	    }
1186 	  else
1187 	    {
1188 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1189 	      base_align = crtl->max_used_stack_slot_alignment;
1190 	    }
1191 	}
1192       else
1193 	{
1194 	  /* Large alignment is only processed in the last pass.  */
1195 	  if (pred)
1196 	    continue;
1197 
1198 	  /* If there were any variables requiring "large" alignment, allocate
1199 	     space.  */
1200 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1201 	    {
1202 	      poly_int64 loffset;
1203 	      rtx large_allocsize;
1204 
1205 	      large_allocsize = gen_int_mode (large_size, Pmode);
1206 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1207 	      loffset = alloc_stack_frame_space
1208 		(rtx_to_poly_int64 (large_allocsize),
1209 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1210 	      large_base = get_dynamic_stack_base (loffset, large_align);
1211 	      large_allocation_done = true;
1212 	    }
1213 	  gcc_assert (large_base != NULL);
1214 
1215 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1216 	  offset = large_alloc;
1217 	  large_alloc += stack_vars[i].size;
1218 
1219 	  base = large_base;
1220 	  base_align = large_align;
1221 	}
1222 
1223       /* Create rtl for each variable based on their location within the
1224 	 partition.  */
1225       for (j = i; j != EOC; j = stack_vars[j].next)
1226 	{
1227 	  expand_one_stack_var_at (stack_vars[j].decl,
1228 				   base, base_align,
1229 				   offset);
1230 	}
1231     }
1232 
1233   gcc_assert (known_eq (large_alloc, large_size));
1234 }
1235 
1236 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1237 static poly_uint64
1238 account_stack_vars (void)
1239 {
1240   size_t si, j, i, n = stack_vars_num;
1241   poly_uint64 size = 0;
1242 
1243   for (si = 0; si < n; ++si)
1244     {
1245       i = stack_vars_sorted[si];
1246 
1247       /* Skip variables that aren't partition representatives, for now.  */
1248       if (stack_vars[i].representative != i)
1249 	continue;
1250 
1251       size += stack_vars[i].size;
1252       for (j = i; j != EOC; j = stack_vars[j].next)
1253 	set_rtl (stack_vars[j].decl, NULL);
1254     }
1255   return size;
1256 }
1257 
1258 /* Record the RTL assignment X for the default def of PARM.  */
1259 
1260 extern void
1261 set_parm_rtl (tree parm, rtx x)
1262 {
1263   gcc_assert (TREE_CODE (parm) == PARM_DECL
1264 	      || TREE_CODE (parm) == RESULT_DECL);
1265 
1266   if (x && !MEM_P (x))
1267     {
1268       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1269 					      TYPE_MODE (TREE_TYPE (parm)),
1270 					      TYPE_ALIGN (TREE_TYPE (parm)));
1271 
1272       /* If the variable alignment is very large we'll dynamically
1273 	 allocate it, which means that in-frame portion is just a
1274 	 pointer.  ??? We've got a pseudo for sure here, do we
1275 	 actually dynamically allocate its spilling area if needed?
1276 	 ??? Isn't it a problem when Pmode alignment also exceeds
1277 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1278       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279 	align = GET_MODE_ALIGNMENT (Pmode);
1280 
1281       record_alignment_for_reg_var (align);
1282     }
1283 
1284   tree ssa = ssa_default_def (cfun, parm);
1285   if (!ssa)
1286     return set_rtl (parm, x);
1287 
1288   int part = var_to_partition (SA.map, ssa);
1289   gcc_assert (part != NO_PARTITION);
1290 
1291   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1292   gcc_assert (changed);
1293 
1294   set_rtl (ssa, x);
1295   gcc_assert (DECL_RTL (parm) == x);
1296 }
1297 
1298 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1299    to a variable to be allocated in the stack frame.  */
1300 
1301 static void
1302 expand_one_stack_var_1 (tree var)
1303 {
1304   poly_uint64 size;
1305   poly_int64 offset;
1306   unsigned byte_align;
1307 
1308   if (TREE_CODE (var) == SSA_NAME)
1309     {
1310       tree type = TREE_TYPE (var);
1311       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1312       byte_align = TYPE_ALIGN_UNIT (type);
1313     }
1314   else
1315     {
1316       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1317       byte_align = align_local_variable (var);
1318     }
1319 
1320   /* We handle highly aligned variables in expand_stack_vars.  */
1321   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1322 
1323   offset = alloc_stack_frame_space (size, byte_align);
1324 
1325   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1326 			   crtl->max_used_stack_slot_alignment, offset);
1327 }
1328 
1329 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1330    already assigned some MEM.  */
1331 
1332 static void
1333 expand_one_stack_var (tree var)
1334 {
1335   if (TREE_CODE (var) == SSA_NAME)
1336     {
1337       int part = var_to_partition (SA.map, var);
1338       if (part != NO_PARTITION)
1339 	{
1340 	  rtx x = SA.partition_to_pseudo[part];
1341 	  gcc_assert (x);
1342 	  gcc_assert (MEM_P (x));
1343 	  return;
1344 	}
1345     }
1346 
1347   return expand_one_stack_var_1 (var);
1348 }
1349 
1350 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1351    that will reside in a hard register.  */
1352 
1353 static void
1354 expand_one_hard_reg_var (tree var)
1355 {
1356   rest_of_decl_compilation (var, 0, 0);
1357 }
1358 
1359 /* Record the alignment requirements of some variable assigned to a
1360    pseudo.  */
1361 
1362 static void
1363 record_alignment_for_reg_var (unsigned int align)
1364 {
1365   if (SUPPORTS_STACK_ALIGNMENT
1366       && crtl->stack_alignment_estimated < align)
1367     {
1368       /* stack_alignment_estimated shouldn't change after stack
1369          realign decision made */
1370       gcc_assert (!crtl->stack_realign_processed);
1371       crtl->stack_alignment_estimated = align;
1372     }
1373 
1374   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1375      So here we only make sure stack_alignment_needed >= align.  */
1376   if (crtl->stack_alignment_needed < align)
1377     crtl->stack_alignment_needed = align;
1378   if (crtl->max_used_stack_slot_alignment < align)
1379     crtl->max_used_stack_slot_alignment = align;
1380 }
1381 
1382 /* Create RTL for an SSA partition.  */
1383 
1384 static void
1385 expand_one_ssa_partition (tree var)
1386 {
1387   int part = var_to_partition (SA.map, var);
1388   gcc_assert (part != NO_PARTITION);
1389 
1390   if (SA.partition_to_pseudo[part])
1391     return;
1392 
1393   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1394 					  TYPE_MODE (TREE_TYPE (var)),
1395 					  TYPE_ALIGN (TREE_TYPE (var)));
1396 
1397   /* If the variable alignment is very large we'll dynamically allocate
1398      it, which means that in-frame portion is just a pointer.  */
1399   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1400     align = GET_MODE_ALIGNMENT (Pmode);
1401 
1402   record_alignment_for_reg_var (align);
1403 
1404   if (!use_register_for_decl (var))
1405     {
1406       if (defer_stack_allocation (var, true))
1407 	add_stack_var (var);
1408       else
1409 	expand_one_stack_var_1 (var);
1410       return;
1411     }
1412 
1413   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1414   rtx x = gen_reg_rtx (reg_mode);
1415 
1416   set_rtl (var, x);
1417 
1418   /* For a promoted variable, X will not be used directly but wrapped in a
1419      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1420      will assume that its upper bits can be inferred from its lower bits.
1421      Therefore, if X isn't initialized on every path from the entry, then
1422      we must do it manually in order to fulfill the above assumption.  */
1423   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1424       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1425     emit_move_insn (x, CONST0_RTX (reg_mode));
1426 }
1427 
1428 /* Record the association between the RTL generated for partition PART
1429    and the underlying variable of the SSA_NAME VAR.  */
1430 
1431 static void
1432 adjust_one_expanded_partition_var (tree var)
1433 {
1434   if (!var)
1435     return;
1436 
1437   tree decl = SSA_NAME_VAR (var);
1438 
1439   int part = var_to_partition (SA.map, var);
1440   if (part == NO_PARTITION)
1441     return;
1442 
1443   rtx x = SA.partition_to_pseudo[part];
1444 
1445   gcc_assert (x);
1446 
1447   set_rtl (var, x);
1448 
1449   if (!REG_P (x))
1450     return;
1451 
1452   /* Note if the object is a user variable.  */
1453   if (decl && !DECL_ARTIFICIAL (decl))
1454     mark_user_reg (x);
1455 
1456   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1457     mark_reg_pointer (x, get_pointer_alignment (var));
1458 }
1459 
1460 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1461    that will reside in a pseudo register.  */
1462 
1463 static void
1464 expand_one_register_var (tree var)
1465 {
1466   if (TREE_CODE (var) == SSA_NAME)
1467     {
1468       int part = var_to_partition (SA.map, var);
1469       if (part != NO_PARTITION)
1470 	{
1471 	  rtx x = SA.partition_to_pseudo[part];
1472 	  gcc_assert (x);
1473 	  gcc_assert (REG_P (x));
1474 	  return;
1475 	}
1476       gcc_unreachable ();
1477     }
1478 
1479   tree decl = var;
1480   tree type = TREE_TYPE (decl);
1481   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1482   rtx x = gen_reg_rtx (reg_mode);
1483 
1484   set_rtl (var, x);
1485 
1486   /* Note if the object is a user variable.  */
1487   if (!DECL_ARTIFICIAL (decl))
1488     mark_user_reg (x);
1489 
1490   if (POINTER_TYPE_P (type))
1491     mark_reg_pointer (x, get_pointer_alignment (var));
1492 }
1493 
1494 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1495    has some associated error, e.g. its type is error-mark.  We just need
1496    to pick something that won't crash the rest of the compiler.  */
1497 
1498 static void
1499 expand_one_error_var (tree var)
1500 {
1501   machine_mode mode = DECL_MODE (var);
1502   rtx x;
1503 
1504   if (mode == BLKmode)
1505     x = gen_rtx_MEM (BLKmode, const0_rtx);
1506   else if (mode == VOIDmode)
1507     x = const0_rtx;
1508   else
1509     x = gen_reg_rtx (mode);
1510 
1511   SET_DECL_RTL (var, x);
1512 }
1513 
1514 /* A subroutine of expand_one_var.  VAR is a variable that will be
1515    allocated to the local stack frame.  Return true if we wish to
1516    add VAR to STACK_VARS so that it will be coalesced with other
1517    variables.  Return false to allocate VAR immediately.
1518 
1519    This function is used to reduce the number of variables considered
1520    for coalescing, which reduces the size of the quadratic problem.  */
1521 
1522 static bool
1523 defer_stack_allocation (tree var, bool toplevel)
1524 {
1525   tree size_unit = TREE_CODE (var) == SSA_NAME
1526     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1527     : DECL_SIZE_UNIT (var);
1528   poly_uint64 size;
1529 
1530   /* Whether the variable is small enough for immediate allocation not to be
1531      a problem with regard to the frame size.  */
1532   bool smallish
1533     = (poly_int_tree_p (size_unit, &size)
1534        && (estimated_poly_value (size)
1535 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1536 
1537   /* If stack protection is enabled, *all* stack variables must be deferred,
1538      so that we can re-order the strings to the top of the frame.
1539      Similarly for Address Sanitizer.  */
1540   if (flag_stack_protect || asan_sanitize_stack_p ())
1541     return true;
1542 
1543   unsigned int align = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_ALIGN (TREE_TYPE (var))
1545     : DECL_ALIGN (var);
1546 
1547   /* We handle "large" alignment via dynamic allocation.  We want to handle
1548      this extra complication in only one place, so defer them.  */
1549   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1550     return true;
1551 
1552   bool ignored = TREE_CODE (var) == SSA_NAME
1553     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1554     : DECL_IGNORED_P (var);
1555 
1556   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1557      might be detached from their block and appear at toplevel when we reach
1558      here.  We want to coalesce them with variables from other blocks when
1559      the immediate contribution to the frame size would be noticeable.  */
1560   if (toplevel && optimize > 0 && ignored && !smallish)
1561     return true;
1562 
1563   /* Variables declared in the outermost scope automatically conflict
1564      with every other variable.  The only reason to want to defer them
1565      at all is that, after sorting, we can more efficiently pack
1566      small variables in the stack frame.  Continue to defer at -O2.  */
1567   if (toplevel && optimize < 2)
1568     return false;
1569 
1570   /* Without optimization, *most* variables are allocated from the
1571      stack, which makes the quadratic problem large exactly when we
1572      want compilation to proceed as quickly as possible.  On the
1573      other hand, we don't want the function's stack frame size to
1574      get completely out of hand.  So we avoid adding scalars and
1575      "small" aggregates to the list at all.  */
1576   if (optimize == 0 && smallish)
1577     return false;
1578 
1579   return true;
1580 }
1581 
1582 /* A subroutine of expand_used_vars.  Expand one variable according to
1583    its flavor.  Variables to be placed on the stack are not actually
1584    expanded yet, merely recorded.
1585    When REALLY_EXPAND is false, only add stack values to be allocated.
1586    Return stack usage this variable is supposed to take.
1587 */
1588 
1589 static poly_uint64
1590 expand_one_var (tree var, bool toplevel, bool really_expand)
1591 {
1592   unsigned int align = BITS_PER_UNIT;
1593   tree origvar = var;
1594 
1595   var = SSAVAR (var);
1596 
1597   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1598     {
1599       if (is_global_var (var))
1600 	return 0;
1601 
1602       /* Because we don't know if VAR will be in register or on stack,
1603 	 we conservatively assume it will be on stack even if VAR is
1604 	 eventually put into register after RA pass.  For non-automatic
1605 	 variables, which won't be on stack, we collect alignment of
1606 	 type and ignore user specified alignment.  Similarly for
1607 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1608       if (TREE_STATIC (var)
1609 	  || DECL_EXTERNAL (var)
1610 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1611 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1612 				   TYPE_MODE (TREE_TYPE (var)),
1613 				   TYPE_ALIGN (TREE_TYPE (var)));
1614       else if (DECL_HAS_VALUE_EXPR_P (var)
1615 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1616 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1617 	   or variables which were assigned a stack slot already by
1618 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1619 	   changed from the offset chosen to it.  */
1620 	align = crtl->stack_alignment_estimated;
1621       else
1622 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1623 
1624       /* If the variable alignment is very large we'll dynamically allocate
1625 	 it, which means that in-frame portion is just a pointer.  */
1626       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1627 	align = GET_MODE_ALIGNMENT (Pmode);
1628     }
1629 
1630   record_alignment_for_reg_var (align);
1631 
1632   poly_uint64 size;
1633   if (TREE_CODE (origvar) == SSA_NAME)
1634     {
1635       gcc_assert (!VAR_P (var)
1636 		  || (!DECL_EXTERNAL (var)
1637 		      && !DECL_HAS_VALUE_EXPR_P (var)
1638 		      && !TREE_STATIC (var)
1639 		      && TREE_TYPE (var) != error_mark_node
1640 		      && !DECL_HARD_REGISTER (var)
1641 		      && really_expand));
1642     }
1643   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1644     ;
1645   else if (DECL_EXTERNAL (var))
1646     ;
1647   else if (DECL_HAS_VALUE_EXPR_P (var))
1648     ;
1649   else if (TREE_STATIC (var))
1650     ;
1651   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1652     ;
1653   else if (TREE_TYPE (var) == error_mark_node)
1654     {
1655       if (really_expand)
1656         expand_one_error_var (var);
1657     }
1658   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1659     {
1660       if (really_expand)
1661 	{
1662 	  expand_one_hard_reg_var (var);
1663 	  if (!DECL_HARD_REGISTER (var))
1664 	    /* Invalid register specification.  */
1665 	    expand_one_error_var (var);
1666 	}
1667     }
1668   else if (use_register_for_decl (var))
1669     {
1670       if (really_expand)
1671         expand_one_register_var (origvar);
1672     }
1673   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1674 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1675     {
1676       /* Reject variables which cover more than half of the address-space.  */
1677       if (really_expand)
1678 	{
1679 	  error ("size of variable %q+D is too large", var);
1680 	  expand_one_error_var (var);
1681 	}
1682     }
1683   else if (defer_stack_allocation (var, toplevel))
1684     add_stack_var (origvar);
1685   else
1686     {
1687       if (really_expand)
1688         {
1689           if (lookup_attribute ("naked",
1690                                 DECL_ATTRIBUTES (current_function_decl)))
1691             error ("cannot allocate stack for variable %q+D, naked function.",
1692                    var);
1693 
1694           expand_one_stack_var (origvar);
1695         }
1696       return size;
1697     }
1698   return 0;
1699 }
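/* For illustration: a local scalar for which use_register_for_decl
   returns true goes through expand_one_register_var and never touches
   the frame; a local array or addressable aggregate is normally deferred
   via add_stack_var so it can be packed with other stack variables, and
   only falls back to an immediate expand_one_stack_var when deferral is
   not worthwhile (see defer_stack_allocation above).  */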
1700 
1701 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1702    expanding variables.  Those variables that can be put into registers
1703    are allocated pseudos; those that can't are put on the stack.
1704 
1705    TOPLEVEL is true if this is the outermost BLOCK.  */
1706 
1707 static void
1708 expand_used_vars_for_block (tree block, bool toplevel)
1709 {
1710   tree t;
1711 
1712   /* Expand all variables at this level.  */
1713   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1714     if (TREE_USED (t)
1715         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1716 	    || !DECL_NONSHAREABLE (t)))
1717       expand_one_var (t, toplevel, true);
1718 
1719   /* Expand all variables at containing levels.  */
1720   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1721     expand_used_vars_for_block (t, false);
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    and clear TREE_USED on all local variables.  */
1726 
1727 static void
1728 clear_tree_used (tree block)
1729 {
1730   tree t;
1731 
1732   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1734     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 	|| !DECL_NONSHAREABLE (t))
1736       TREE_USED (t) = 0;
1737 
1738   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1739     clear_tree_used (t);
1740 }
1741 
1742 enum {
1743   SPCT_FLAG_DEFAULT = 1,
1744   SPCT_FLAG_ALL = 2,
1745   SPCT_FLAG_STRONG = 3,
1746   SPCT_FLAG_EXPLICIT = 4
1747 };
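/* These values mirror how flag_stack_protect is set by the option
   machinery: -fstack-protector selects SPCT_FLAG_DEFAULT,
   -fstack-protector-all SPCT_FLAG_ALL, -fstack-protector-strong
   SPCT_FLAG_STRONG and -fstack-protector-explicit SPCT_FLAG_EXPLICIT.  */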
1748 
1749 /* Examine TYPE and determine a bit mask of the following features.  */
1750 
1751 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1752 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1753 #define SPCT_HAS_ARRAY			4
1754 #define SPCT_HAS_AGGREGATE		8
1755 
1756 static unsigned int
1757 stack_protect_classify_type (tree type)
1758 {
1759   unsigned int ret = 0;
1760   tree t;
1761 
1762   switch (TREE_CODE (type))
1763     {
1764     case ARRAY_TYPE:
1765       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1766       if (t == char_type_node
1767 	  || t == signed_char_type_node
1768 	  || t == unsigned_char_type_node)
1769 	{
1770 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1771 	  unsigned HOST_WIDE_INT len;
1772 
1773 	  if (!TYPE_SIZE_UNIT (type)
1774 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1775 	    len = max;
1776 	  else
1777 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1778 
1779 	  if (len < max)
1780 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1781 	  else
1782 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1783 	}
1784       else
1785 	ret = SPCT_HAS_ARRAY;
1786       break;
1787 
1788     case UNION_TYPE:
1789     case QUAL_UNION_TYPE:
1790     case RECORD_TYPE:
1791       ret = SPCT_HAS_AGGREGATE;
1792       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1793 	if (TREE_CODE (t) == FIELD_DECL)
1794 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1795       break;
1796 
1797     default:
1798       break;
1799     }
1800 
1801   return ret;
1802 }
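/* For illustration, assuming the default --param ssp-buffer-size=8,
   stack_protect_classify_type returns:
     char buf[4]           -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char buf[64]          -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     struct { int a[4]; }  -> SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY
     int                   -> 0  */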
1803 
1804 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1805    part of the local stack frame.  Remember if we ever return nonzero for
1806    any variable in this function.  The return value is the phase number in
1807    which the variable should be allocated.  */
1808 
1809 static int
1810 stack_protect_decl_phase (tree decl)
1811 {
1812   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1813   int ret = 0;
1814 
1815   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1816     has_short_buffer = true;
1817 
1818   if (flag_stack_protect == SPCT_FLAG_ALL
1819       || flag_stack_protect == SPCT_FLAG_STRONG
1820       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1821 	  && lookup_attribute ("stack_protect",
1822 			       DECL_ATTRIBUTES (current_function_decl))))
1823     {
1824       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1825 	  && !(bits & SPCT_HAS_AGGREGATE))
1826 	ret = 1;
1827       else if (bits & SPCT_HAS_ARRAY)
1828 	ret = 2;
1829     }
1830   else
1831     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1832 
1833   if (ret)
1834     has_protected_decls = true;
1835 
1836   return ret;
1837 }
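/* For illustration, with -fstack-protector-strong a plain char buf[16]
   lands in phase 1 (character arrays only), int a[10] in phase 2 (other
   arrays), and a scalar in phase 0 (not segregated).  With plain
   -fstack-protector only "large" character arrays -- at least
   --param ssp-buffer-size bytes -- are segregated.  */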
1838 
1839 /* Two helper routines that check for phase 1 and phase 2.  These are used
1840    as callbacks for expand_stack_vars.  */
1841 
1842 static bool
1843 stack_protect_decl_phase_1 (size_t i)
1844 {
1845   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1846 }
1847 
1848 static bool
1849 stack_protect_decl_phase_2 (size_t i)
1850 {
1851   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1852 }
1853 
1854 /* A helper function that checks for the asan phase (with stack protector
1855    it is phase 3).  This is used as a callback for expand_stack_vars.
1856    Returns true if any of the vars in the partition need to be protected.  */
1857 
1858 static bool
1859 asan_decl_phase_3 (size_t i)
1860 {
1861   while (i != EOC)
1862     {
1863       if (asan_protect_stack_decl (stack_vars[i].decl))
1864 	return true;
1865       i = stack_vars[i].next;
1866     }
1867   return false;
1868 }
1869 
1870 /* Ensure that variables in different stack protection phases conflict
1871    so that they are not merged into the same stack slot.  */
1872 
1873 static void
1874 add_stack_protection_conflicts (void)
1875 {
1876   size_t i, j, n = stack_vars_num;
1877   unsigned char *phase;
1878 
1879   phase = XNEWVEC (unsigned char, n);
1880   for (i = 0; i < n; ++i)
1881     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1882 
1883   for (i = 0; i < n; ++i)
1884     {
1885       unsigned char ph_i = phase[i];
1886       for (j = i + 1; j < n; ++j)
1887 	if (ph_i != phase[j])
1888 	  add_stack_var_conflict (i, j);
1889     }
1890 
1891   XDELETEVEC (phase);
1892 }
1893 
1894 /* Create a decl for the guard at the top of the stack frame.  */
1895 
1896 static void
1897 create_stack_guard (void)
1898 {
1899   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1900 			   VAR_DECL, NULL, ptr_type_node);
1901   TREE_THIS_VOLATILE (guard) = 1;
1902   TREE_USED (guard) = 1;
1903   expand_one_stack_var (guard);
1904   crtl->stack_protect_guard = guard;
1905 }
1906 
1907 /* Prepare for expanding variables.  */
1908 static void
1909 init_vars_expansion (void)
1910 {
1911   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1912   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1913 
1914   /* A map from decl to stack partition.  */
1915   decl_to_stack_part = new hash_map<tree, size_t>;
1916 
1917   /* Initialize local stack smashing state.  */
1918   has_protected_decls = false;
1919   has_short_buffer = false;
1920 }
1921 
1922 /* Free up stack variable graph data.  */
1923 static void
1924 fini_vars_expansion (void)
1925 {
1926   bitmap_obstack_release (&stack_var_bitmap_obstack);
1927   if (stack_vars)
1928     XDELETEVEC (stack_vars);
1929   if (stack_vars_sorted)
1930     XDELETEVEC (stack_vars_sorted);
1931   stack_vars = NULL;
1932   stack_vars_sorted = NULL;
1933   stack_vars_alloc = stack_vars_num = 0;
1934   delete decl_to_stack_part;
1935   decl_to_stack_part = NULL;
1936 }
1937 
1938 /* Make a fair guess for the size of the stack frame of the function
1939    in NODE.  This doesn't have to be exact, the result is only used in
1940    the inline heuristics.  So we don't want to run the full stack var
1941    packing algorithm (which is quadratic in the number of stack vars).
1942    Instead, we calculate the total size of all stack vars.  This turns
1943    out to be a pretty fair estimate -- packing of stack vars doesn't
1944    happen very often.  */
1945 
1946 HOST_WIDE_INT
1947 estimated_stack_frame_size (struct cgraph_node *node)
1948 {
1949   poly_int64 size = 0;
1950   size_t i;
1951   tree var;
1952   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1953 
1954   push_cfun (fn);
1955 
1956   init_vars_expansion ();
1957 
1958   FOR_EACH_LOCAL_DECL (fn, i, var)
1959     if (auto_var_in_fn_p (var, fn->decl))
1960       size += expand_one_var (var, true, false);
1961 
1962   if (stack_vars_num > 0)
1963     {
1964       /* Fake sorting the stack vars for account_stack_vars ().  */
1965       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1966       for (i = 0; i < stack_vars_num; ++i)
1967 	stack_vars_sorted[i] = i;
1968       size += account_stack_vars ();
1969     }
1970 
1971   fini_vars_expansion ();
1972   pop_cfun ();
1973   return estimated_poly_value (size);
1974 }
1975 
1976 /* Helper routine to check if a record or union contains an array field. */
1977 
1978 static int
1979 record_or_union_type_has_array_p (const_tree tree_type)
1980 {
1981   tree fields = TYPE_FIELDS (tree_type);
1982   tree f;
1983 
1984   for (f = fields; f; f = DECL_CHAIN (f))
1985     if (TREE_CODE (f) == FIELD_DECL)
1986       {
1987 	tree field_type = TREE_TYPE (f);
1988 	if (RECORD_OR_UNION_TYPE_P (field_type)
1989 	    && record_or_union_type_has_array_p (field_type))
1990 	  return 1;
1991 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1992 	  return 1;
1993       }
1994   return 0;
1995 }
1996 
1997 /* Check if the current function has local referenced variables that
1998    have their addresses taken, contain an array, or are arrays.  */
1999 
2000 static bool
2001 stack_protect_decl_p ()
2002 {
2003   unsigned i;
2004   tree var;
2005 
2006   FOR_EACH_LOCAL_DECL (cfun, i, var)
2007     if (!is_global_var (var))
2008       {
2009 	tree var_type = TREE_TYPE (var);
2010 	if (VAR_P (var)
2011 	    && (TREE_CODE (var_type) == ARRAY_TYPE
2012 		|| TREE_ADDRESSABLE (var)
2013 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2014 		    && record_or_union_type_has_array_p (var_type))))
2015 	  return true;
2016       }
2017   return false;
2018 }
2019 
2020 /* Check if the current function has calls that use a return slot.  */
2021 
2022 static bool
2023 stack_protect_return_slot_p ()
2024 {
2025   basic_block bb;
2026 
2027   FOR_ALL_BB_FN (bb, cfun)
2028     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2029 	 !gsi_end_p (gsi); gsi_next (&gsi))
2030       {
2031 	gimple *stmt = gsi_stmt (gsi);
2032 	/* This assumes that calls to internal-only functions never
2033 	   use a return slot.  */
2034 	if (is_gimple_call (stmt)
2035 	    && !gimple_call_internal_p (stmt)
2036 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2037 				  gimple_call_fndecl (stmt)))
2038 	  return true;
2039       }
2040   return false;
2041 }
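/* For illustration: a call such as "s = f ()" where f returns a struct
   in memory (aggregate_value_p is true for its return type) makes this
   function return true; internal calls are assumed never to use a
   return slot and are skipped.  */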
2042 
2043 /* Expand all variables used in the function.  */
2044 
2045 static rtx_insn *
2046 expand_used_vars (void)
2047 {
2048   tree var, outer_block = DECL_INITIAL (current_function_decl);
2049   auto_vec<tree> maybe_local_decls;
2050   rtx_insn *var_end_seq = NULL;
2051   unsigned i;
2052   unsigned len;
2053   bool gen_stack_protect_signal = false;
2054 
2055   /* Compute the phase of the stack frame for this function.  */
2056   {
2057     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2058     int off = targetm.starting_frame_offset () % align;
2059     frame_phase = off ? align - off : 0;
2060   }
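  /* For example, if targetm.starting_frame_offset () were 8 with a
     128-bit PREFERRED_STACK_BOUNDARY (align = 16 bytes), then off = 8
     and frame_phase = 8; when the starting offset is already a multiple
     of the alignment, frame_phase is 0.  */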
2061 
2062   /* Set TREE_USED on all variables in the local_decls.  */
2063   FOR_EACH_LOCAL_DECL (cfun, i, var)
2064     TREE_USED (var) = 1;
2065   /* Clear TREE_USED on all variables associated with a block scope.  */
2066   clear_tree_used (DECL_INITIAL (current_function_decl));
2067 
2068   init_vars_expansion ();
2069 
2070   if (targetm.use_pseudo_pic_reg ())
2071     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2072 
2073   for (i = 0; i < SA.map->num_partitions; i++)
2074     {
2075       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2076 	continue;
2077 
2078       tree var = partition_to_var (SA.map, i);
2079 
2080       gcc_assert (!virtual_operand_p (var));
2081 
2082       expand_one_ssa_partition (var);
2083     }
2084 
2085   if (flag_stack_protect == SPCT_FLAG_STRONG)
2086       gen_stack_protect_signal
2087 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2088 
2089   /* At this point all variables on the local_decls with TREE_USED
2090      set are not associated with any block scope.  Lay them out.  */
2091 
2092   len = vec_safe_length (cfun->local_decls);
2093   FOR_EACH_LOCAL_DECL (cfun, i, var)
2094     {
2095       bool expand_now = false;
2096 
2097       /* Expanded above already.  */
2098       if (is_gimple_reg (var))
2099 	{
2100 	  TREE_USED (var) = 0;
2101 	  goto next;
2102 	}
2103       /* We didn't set a block for static or extern because it's hard
2104 	 to tell the difference between a global variable (re)declared
2105 	 in a local scope, and one that's really declared there to
2106 	 begin with.  And it doesn't really matter much, since we're
2107 	 not giving them stack space.  Expand them now.  */
2108       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2109 	expand_now = true;
2110 
2111       /* Expand variables not associated with any block now.  Those created by
2112 	 the optimizers could be live anywhere in the function.  Those that
2113 	 could possibly have been scoped originally and detached from their
2114 	 block will have their allocation deferred so we coalesce them with
2115 	 others when optimization is enabled.  */
2116       else if (TREE_USED (var))
2117 	expand_now = true;
2118 
2119       /* Finally, mark all variables on the list as used.  We'll use
2120 	 this in a moment when we expand those associated with scopes.  */
2121       TREE_USED (var) = 1;
2122 
2123       if (expand_now)
2124 	expand_one_var (var, true, true);
2125 
2126     next:
2127       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2128 	{
2129 	  rtx rtl = DECL_RTL_IF_SET (var);
2130 
2131 	  /* Keep artificial non-ignored vars in cfun->local_decls
2132 	     chain until instantiate_decls.  */
2133 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2134 	    add_local_decl (cfun, var);
2135 	  else if (rtl == NULL_RTX)
2136 	    /* If rtl isn't set yet, which can happen e.g. with
2137 	       -fstack-protector, retry before returning from this
2138 	       function.  */
2139 	    maybe_local_decls.safe_push (var);
2140 	}
2141     }
2142 
2143   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2144 
2145      +-----------------+-----------------+
2146      | ...processed... | ...duplicates...|
2147      +-----------------+-----------------+
2148                        ^
2149 		       +-- LEN points here.
2150 
2151      We just want the duplicates, as those are the artificial
2152      non-ignored vars that we want to keep until instantiate_decls.
2153      Move them down and truncate the array.  */
2154   if (!vec_safe_is_empty (cfun->local_decls))
2155     cfun->local_decls->block_remove (0, len);
2156 
2157   /* At this point, all variables within the block tree with TREE_USED
2158      set are actually used by the optimized function.  Lay them out.  */
2159   expand_used_vars_for_block (outer_block, true);
2160 
2161   if (stack_vars_num > 0)
2162     {
2163       add_scope_conflicts ();
2164 
2165       /* If stack protection is enabled, we don't share space between
2166 	 vulnerable data and non-vulnerable data.  */
2167       if (flag_stack_protect != 0
2168 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2169 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2170 		  && lookup_attribute ("stack_protect",
2171 				       DECL_ATTRIBUTES (current_function_decl)))))
2172 	add_stack_protection_conflicts ();
2173 
2174       /* Now that we have collected all stack variables, and have computed a
2175 	 minimal interference graph, attempt to save some stack space.  */
2176       partition_stack_vars ();
2177       if (dump_file)
2178 	dump_stack_var_partition ();
2179     }
2180 
2181   switch (flag_stack_protect)
2182     {
2183     case SPCT_FLAG_ALL:
2184       create_stack_guard ();
2185       break;
2186 
2187     case SPCT_FLAG_STRONG:
2188       if (gen_stack_protect_signal
2189 	  || cfun->calls_alloca || has_protected_decls
2190 	  || lookup_attribute ("stack_protect",
2191 			       DECL_ATTRIBUTES (current_function_decl)))
2192 	create_stack_guard ();
2193       break;
2194 
2195     case SPCT_FLAG_DEFAULT:
2196       if (cfun->calls_alloca || has_protected_decls
2197 	  || lookup_attribute ("stack_protect",
2198 			       DECL_ATTRIBUTES (current_function_decl)))
2199 	create_stack_guard ();
2200       break;
2201 
2202     case SPCT_FLAG_EXPLICIT:
2203       if (lookup_attribute ("stack_protect",
2204 			    DECL_ATTRIBUTES (current_function_decl)))
2205 	create_stack_guard ();
2206       break;
2207     default:
2208       ;
2209     }
2210 
2211   /* Assign rtl to each variable based on these partitions.  */
2212   if (stack_vars_num > 0)
2213     {
2214       struct stack_vars_data data;
2215 
2216       data.asan_base = NULL_RTX;
2217       data.asan_alignb = 0;
2218 
2219       /* Reorder decls to be protected by iterating over the variables
2220 	 array multiple times, and allocating out of each phase in turn.  */
2221       /* ??? We could probably integrate this into the qsort we did
2222 	 earlier, such that we naturally see these variables first,
2223 	 and thus naturally allocate things in the right order.  */
2224       if (has_protected_decls)
2225 	{
2226 	  /* Phase 1 contains only character arrays.  */
2227 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2228 
2229 	  /* Phase 2 contains other kinds of arrays.  */
2230 	  if (flag_stack_protect == SPCT_FLAG_ALL
2231 	      || flag_stack_protect == SPCT_FLAG_STRONG
2232 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2233 		  && lookup_attribute ("stack_protect",
2234 				       DECL_ATTRIBUTES (current_function_decl))))
2235 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2236 	}
2237 
2238       if (asan_sanitize_stack_p ())
2239 	/* Phase 3, any partitions that need asan protection
2240 	   in addition to phase 1 and 2.  */
2241 	expand_stack_vars (asan_decl_phase_3, &data);
2242 
2243       /* ASAN description strings don't yet have a syntax for expressing
2244 	 polynomial offsets.  */
2245       HOST_WIDE_INT prev_offset;
2246       if (!data.asan_vec.is_empty ()
2247 	  && frame_offset.is_constant (&prev_offset))
2248 	{
2249 	  HOST_WIDE_INT offset, sz, redzonesz;
2250 	  redzonesz = ASAN_RED_ZONE_SIZE;
2251 	  sz = data.asan_vec[0] - prev_offset;
2252 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2253 	      && data.asan_alignb <= 4096
2254 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2255 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2256 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
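	  /* As a worked example, assuming ASAN_RED_ZONE_SIZE is 32:
	     with sz = 40 and data.asan_alignb = 64 this gives
	     redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
	     i.e. the red zone is grown so that the next object stays
	     64-byte aligned.  */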
2257 	  /* Allocating a constant amount of space from a constant
2258 	     starting offset must give a constant result.  */
2259 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2260 		    .to_constant ());
2261 	  data.asan_vec.safe_push (prev_offset);
2262 	  data.asan_vec.safe_push (offset);
2263 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2264 	  if (STRICT_ALIGNMENT)
2265 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2266 				      << ASAN_SHADOW_SHIFT)
2267 				     / BITS_PER_UNIT, 1);
2268 
2269 	  var_end_seq
2270 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2271 					  data.asan_base,
2272 					  data.asan_alignb,
2273 					  data.asan_vec.address (),
2274 					  data.asan_decl_vec.address (),
2275 					  data.asan_vec.length ());
2276 	}
2277 
2278       expand_stack_vars (NULL, &data);
2279     }
2280 
2281   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2282     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2283 					      virtual_stack_vars_rtx,
2284 					      var_end_seq);
2285 
2286   fini_vars_expansion ();
2287 
2288   /* If there were any artificial non-ignored vars without rtl
2289      found earlier, see if deferred stack allocation hasn't assigned
2290      rtl to them.  */
2291   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2292     {
2293       rtx rtl = DECL_RTL_IF_SET (var);
2294 
2295       /* Keep artificial non-ignored vars in cfun->local_decls
2296 	 chain until instantiate_decls.  */
2297       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2298 	add_local_decl (cfun, var);
2299     }
2300 
2301   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2302   if (STACK_ALIGNMENT_NEEDED)
2303     {
2304       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2305       if (FRAME_GROWS_DOWNWARD)
2306 	frame_offset = aligned_lower_bound (frame_offset, align);
2307       else
2308 	frame_offset = aligned_upper_bound (frame_offset, align);
2309     }
2310 
2311   return var_end_seq;
2312 }
2313 
2314 
2315 /* If we need to produce a detailed dump, print the tree representation
2316    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2317    generated for STMT should have been appended.  */
2318 
2319 static void
2320 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2321 {
2322   if (dump_file && (dump_flags & TDF_DETAILS))
2323     {
2324       fprintf (dump_file, "\n;; ");
2325       print_gimple_stmt (dump_file, stmt, 0,
2326 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2327       fprintf (dump_file, "\n");
2328 
2329       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2330     }
2331 }
2332 
2333 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2334 
2335 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2336 
2337 /* Returns the label_rtx expression for a label starting basic block BB.  */
2338 
2339 static rtx_code_label *
2340 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2341 {
2342   gimple_stmt_iterator gsi;
2343   tree lab;
2344 
2345   if (bb->flags & BB_RTL)
2346     return block_label (bb);
2347 
2348   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2349   if (elt)
2350     return *elt;
2351 
2352   /* Find the tree label if it is present.  */
2353 
2354   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2355     {
2356       glabel *lab_stmt;
2357 
2358       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2359       if (!lab_stmt)
2360 	break;
2361 
2362       lab = gimple_label_label (lab_stmt);
2363       if (DECL_NONLOCAL (lab))
2364 	break;
2365 
2366       return jump_target_rtx (lab);
2367     }
2368 
2369   rtx_code_label *l = gen_label_rtx ();
2370   lab_rtx_for_bb->put (bb, l);
2371   return l;
2372 }
2373 
2374 
2375 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2376    of a basic block where we just expanded the conditional at the end,
2377    possibly clean up the CFG and instruction sequence.  LAST is the
2378    last instruction before the just emitted jump sequence.  */
2379 
2380 static void
2381 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2382 {
2383   /* Special case: when jumpif decides that the condition is
2384      trivial it emits an unconditional jump (and the necessary
2385      barrier).  But we still have two edges, the fallthru one is
2386      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2387      we have to insert insns (and split edges) before
2388      find_many_sub_basic_blocks and hence before purge_dead_edges.
2389      But splitting edges might create new blocks which depend on the
2390      fact that if there are two edges there's no barrier.  So the
2391      barrier would get lost and verify_flow_info would ICE.  Instead
2392      of auditing all edge splitters to care for the barrier (which
2393      normally isn't there in a cleaned CFG), fix it here.  */
2394   if (BARRIER_P (get_last_insn ()))
2395     {
2396       rtx_insn *insn;
2397       remove_edge (e);
2398       /* Now, we have a single successor block, if we have insns to
2399 	 insert on the remaining edge we potentially will insert
2400 	 it at the end of this block (if the dest block isn't feasible)
2401 	 in order to avoid splitting the edge.  This insertion will take
2402 	 place in front of the last jump.  But we might have emitted
2403 	 multiple jumps (conditional and one unconditional) to the
2404 	 same destination.  Inserting in front of the last one then
2405 	 is a problem.  See PR 40021.  We fix this by deleting all
2406 	 jumps except the last unconditional one.  */
2407       insn = PREV_INSN (get_last_insn ());
2408       /* Make sure we have an unconditional jump.  Otherwise we're
2409 	 confused.  */
2410       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2411       for (insn = PREV_INSN (insn); insn != last;)
2412 	{
2413 	  insn = PREV_INSN (insn);
2414 	  if (JUMP_P (NEXT_INSN (insn)))
2415 	    {
2416 	      if (!any_condjump_p (NEXT_INSN (insn)))
2417 		{
2418 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2419 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2420 		}
2421 	      delete_insn (NEXT_INSN (insn));
2422 	    }
2423 	}
2424     }
2425 }
2426 
2427 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2428    Returns a new basic block if we've terminated the current basic
2429    block and created a new one.  */
2430 
2431 static basic_block
2432 expand_gimple_cond (basic_block bb, gcond *stmt)
2433 {
2434   basic_block new_bb, dest;
2435   edge true_edge;
2436   edge false_edge;
2437   rtx_insn *last2, *last;
2438   enum tree_code code;
2439   tree op0, op1;
2440 
2441   code = gimple_cond_code (stmt);
2442   op0 = gimple_cond_lhs (stmt);
2443   op1 = gimple_cond_rhs (stmt);
2444   /* We're sometimes presented with such code:
2445        D.123_1 = x < y;
2446        if (D.123_1 != 0)
2447          ...
2448      This would expand to two comparisons which then later might
2449      be cleaned up by combine.  But some pattern matchers like if-conversion
2450      work better when there's only one compare, so make up for this
2451      here as special exception if TER would have made the same change.  */
2452   if (SA.values
2453       && TREE_CODE (op0) == SSA_NAME
2454       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2455       && TREE_CODE (op1) == INTEGER_CST
2456       && ((gimple_cond_code (stmt) == NE_EXPR
2457 	   && integer_zerop (op1))
2458 	  || (gimple_cond_code (stmt) == EQ_EXPR
2459 	      && integer_onep (op1)))
2460       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2461     {
2462       gimple *second = SSA_NAME_DEF_STMT (op0);
2463       if (gimple_code (second) == GIMPLE_ASSIGN)
2464 	{
2465 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2466 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2467 	    {
2468 	      code = code2;
2469 	      op0 = gimple_assign_rhs1 (second);
2470 	      op1 = gimple_assign_rhs2 (second);
2471 	    }
2472 	  /* If jumps are cheap and the target does not support conditional
2473 	     compare, turn some more codes into jumpy sequences.  */
2474 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2475 		   && targetm.gen_ccmp_first == NULL)
2476 	    {
2477 	      if ((code2 == BIT_AND_EXPR
2478 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2479 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2480 		  || code2 == TRUTH_AND_EXPR)
2481 		{
2482 		  code = TRUTH_ANDIF_EXPR;
2483 		  op0 = gimple_assign_rhs1 (second);
2484 		  op1 = gimple_assign_rhs2 (second);
2485 		}
2486 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2487 		{
2488 		  code = TRUTH_ORIF_EXPR;
2489 		  op0 = gimple_assign_rhs1 (second);
2490 		  op1 = gimple_assign_rhs2 (second);
2491 		}
2492 	    }
2493 	}
2494     }
2495 
2496   last2 = last = get_last_insn ();
2497 
2498   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2499   set_curr_insn_location (gimple_location (stmt));
2500 
2501   /* These flags have no purpose in RTL land.  */
2502   true_edge->flags &= ~EDGE_TRUE_VALUE;
2503   false_edge->flags &= ~EDGE_FALSE_VALUE;
2504 
2505   /* We can either have a pure conditional jump with one fallthru edge or
2506      two-way jump that needs to be decomposed into two basic blocks.  */
2507   if (false_edge->dest == bb->next_bb)
2508     {
2509       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2510 		true_edge->probability);
2511       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2512       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2513 	set_curr_insn_location (true_edge->goto_locus);
2514       false_edge->flags |= EDGE_FALLTHRU;
2515       maybe_cleanup_end_of_block (false_edge, last);
2516       return NULL;
2517     }
2518   if (true_edge->dest == bb->next_bb)
2519     {
2520       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2521 		   false_edge->probability);
2522       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2523       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2524 	set_curr_insn_location (false_edge->goto_locus);
2525       true_edge->flags |= EDGE_FALLTHRU;
2526       maybe_cleanup_end_of_block (true_edge, last);
2527       return NULL;
2528     }
2529 
2530   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 	    true_edge->probability);
2532   last = get_last_insn ();
2533   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2534     set_curr_insn_location (false_edge->goto_locus);
2535   emit_jump (label_rtx_for_bb (false_edge->dest));
2536 
2537   BB_END (bb) = last;
2538   if (BARRIER_P (BB_END (bb)))
2539     BB_END (bb) = PREV_INSN (BB_END (bb));
2540   update_bb_for_insn (bb);
2541 
2542   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2543   dest = false_edge->dest;
2544   redirect_edge_succ (false_edge, new_bb);
2545   false_edge->flags |= EDGE_FALLTHRU;
2546   new_bb->count = false_edge->count ();
2547   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2548   add_bb_to_loop (new_bb, loop);
2549   if (loop->latch == bb
2550       && loop->header == dest)
2551     loop->latch = new_bb;
2552   make_single_succ_edge (new_bb, dest, 0);
2553   if (BARRIER_P (BB_END (new_bb)))
2554     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2555   update_bb_for_insn (new_bb);
2556 
2557   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2558 
2559   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2560     {
2561       set_curr_insn_location (true_edge->goto_locus);
2562       true_edge->goto_locus = curr_insn_location ();
2563     }
2564 
2565   return new_bb;
2566 }
2567 
2568 /* Mark all calls that can have a transaction restart.  */
2569 
2570 static void
2571 mark_transaction_restart_calls (gimple *stmt)
2572 {
2573   struct tm_restart_node dummy;
2574   tm_restart_node **slot;
2575 
2576   if (!cfun->gimple_df->tm_restart)
2577     return;
2578 
2579   dummy.stmt = stmt;
2580   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2581   if (slot)
2582     {
2583       struct tm_restart_node *n = *slot;
2584       tree list = n->label_or_list;
2585       rtx_insn *insn;
2586 
2587       for (insn = next_real_insn (get_last_insn ());
2588 	   !CALL_P (insn);
2589 	   insn = next_real_insn (insn))
2590 	continue;
2591 
2592       if (TREE_CODE (list) == LABEL_DECL)
2593 	add_reg_note (insn, REG_TM, label_rtx (list));
2594       else
2595 	for (; list ; list = TREE_CHAIN (list))
2596 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2597     }
2598 }
2599 
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601    statement STMT.  */
2602 
2603 static void
2604 expand_call_stmt (gcall *stmt)
2605 {
2606   tree exp, decl, lhs;
2607   bool builtin_p;
2608   size_t i;
2609 
2610   if (gimple_call_internal_p (stmt))
2611     {
2612       expand_internal_call (stmt);
2613       return;
2614     }
2615 
2616   /* If this is a call to a built-in function and it has no effect other
2617      than setting the lhs, try to implement it using an internal function
2618      instead.  */
2619   decl = gimple_call_fndecl (stmt);
2620   if (gimple_call_lhs (stmt)
2621       && !gimple_has_side_effects (stmt)
2622       && (optimize || (decl && called_as_built_in (decl))))
2623     {
2624       internal_fn ifn = replacement_internal_fn (stmt);
2625       if (ifn != IFN_LAST)
2626 	{
2627 	  expand_internal_call (ifn, stmt);
2628 	  return;
2629 	}
2630     }
2631 
2632   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2633 
2634   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2635   builtin_p = decl && DECL_BUILT_IN (decl);
2636 
2637   /* If this is not a builtin function, the function type through which the
2638      call is made may be different from the type of the function.  */
2639   if (!builtin_p)
2640     CALL_EXPR_FN (exp)
2641       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2642 		      CALL_EXPR_FN (exp));
2643 
2644   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2645   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2646 
2647   for (i = 0; i < gimple_call_num_args (stmt); i++)
2648     {
2649       tree arg = gimple_call_arg (stmt, i);
2650       gimple *def;
2651       /* TER forwards ADDR_EXPRs into arguments of builtin functions so we
2652 	 have a chance to infer more correct alignment information.  See PR39954.  */
2653       if (builtin_p
2654 	  && TREE_CODE (arg) == SSA_NAME
2655 	  && (def = get_gimple_for_ssa_name (arg))
2656 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2657 	arg = gimple_assign_rhs1 (def);
2658       CALL_EXPR_ARG (exp, i) = arg;
2659     }
2660 
2661   if (gimple_has_side_effects (stmt))
2662     TREE_SIDE_EFFECTS (exp) = 1;
2663 
2664   if (gimple_call_nothrow_p (stmt))
2665     TREE_NOTHROW (exp) = 1;
2666 
2667   if (gimple_no_warning_p (stmt))
2668     TREE_NO_WARNING (exp) = 1;
2669 
2670   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2671   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2672   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2673   if (decl
2674       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2675       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2676     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2677   else
2678     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2679   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2680   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2681   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2682   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2683 
2684   /* Ensure RTL is created for debug args.  */
2685   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2686     {
2687       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2688       unsigned int ix;
2689       tree dtemp;
2690 
2691       if (debug_args)
2692 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2693 	  {
2694 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2695 	    expand_debug_expr (dtemp);
2696 	  }
2697     }
2698 
2699   rtx_insn *before_call = get_last_insn ();
2700   lhs = gimple_call_lhs (stmt);
2701   if (lhs)
2702     expand_assignment (lhs, exp, false);
2703   else
2704     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2705 
2706   /* If the gimple call is an indirect call and has the 'nocf_check'
2707      attribute, find the generated CALL insn and mark it so that no
2708      control-flow verification is needed.  */
2709   if (gimple_call_nocf_check_p (stmt)
2710       && !gimple_call_fndecl (stmt))
2711     {
2712       rtx_insn *last = get_last_insn ();
2713       while (!CALL_P (last)
2714 	     && last != before_call)
2715 	last = PREV_INSN (last);
2716 
2717       if (last != before_call)
2718 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2719     }
2720 
2721   mark_transaction_restart_calls (stmt);
2722 }
2723 
2724 
2725 /* Generate RTL for an asm statement (explicit assembler code).
2726    STRING is a STRING_CST node containing the assembler code text,
2727    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2728    insn is volatile; don't optimize it.  */
2729 
2730 static void
2731 expand_asm_loc (tree string, int vol, location_t locus)
2732 {
2733   rtx body;
2734 
2735   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2736 				ggc_strdup (TREE_STRING_POINTER (string)),
2737 				locus);
2738 
2739   MEM_VOLATILE_P (body) = vol;
2740 
2741   /* Non-empty basic ASM implicitly clobbers memory.  */
2742   if (TREE_STRING_LENGTH (string) != 0)
2743     {
2744       rtx asm_op, clob;
2745       unsigned i, nclobbers;
2746       auto_vec<rtx> input_rvec, output_rvec;
2747       auto_vec<const char *> constraints;
2748       auto_vec<rtx> clobber_rvec;
2749       HARD_REG_SET clobbered_regs;
2750       CLEAR_HARD_REG_SET (clobbered_regs);
2751 
2752       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2753       clobber_rvec.safe_push (clob);
2754 
2755       if (targetm.md_asm_adjust)
2756 	targetm.md_asm_adjust (output_rvec, input_rvec,
2757 			       constraints, clobber_rvec,
2758 			       clobbered_regs);
2759 
2760       asm_op = body;
2761       nclobbers = clobber_rvec.length ();
2762       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2763 
2764       XVECEXP (body, 0, 0) = asm_op;
2765       for (i = 0; i < nclobbers; i++)
2766 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2767     }
2768 
2769   emit_insn (body);
2770 }
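/* For illustration: a non-empty basic asm such as asm ("nop") is emitted
   as a PARALLEL of the ASM_INPUT plus a (clobber (mem:BLK (scratch)))
   and any clobbers added by targetm.md_asm_adjust, so it is treated as
   clobbering memory, while an empty asm ("") is emitted as a bare
   ASM_INPUT with no implicit clobber.  */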
2771 
2772 /* Return the number of times character C occurs in string S.  */
2773 static int
2774 n_occurrences (int c, const char *s)
2775 {
2776   int n = 0;
2777   while (*s)
2778     n += (*s++ == c);
2779   return n;
2780 }
2781 
2782 /* A subroutine of expand_asm_operands.  Check that all operands have
2783    the same number of alternatives.  Return true if so.  */
2784 
2785 static bool
2786 check_operand_nalternatives (const vec<const char *> &constraints)
2787 {
2788   unsigned len = constraints.length();
2789   if (len > 0)
2790     {
2791       int nalternatives = n_occurrences (',', constraints[0]);
2792 
2793       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2794 	{
2795 	  error ("too many alternatives in %<asm%>");
2796 	  return false;
2797 	}
2798 
2799       for (unsigned i = 1; i < len; ++i)
2800 	if (n_occurrences (',', constraints[i]) != nalternatives)
2801 	  {
2802 	    error ("operand constraints for %<asm%> differ "
2803 		   "in number of alternatives");
2804 	    return false;
2805 	  }
2806     }
2807   return true;
2808 }
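/* For illustration: the constraints { "=r,m", "r,o" } both contain two
   alternatives and are accepted, whereas mixing "=r,m" with a
   single-alternative "r" triggers the "differ in number of
   alternatives" error above.  */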
2809 
2810 /* Check for overlap between registers marked in CLOBBERED_REGS and
2811    anything inappropriate in T.  Emit an error and return true if a
2812    conflict is found, false if everything is ok.  */
2813 
2814 static bool
2815 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2816 {
2817   /* Conflicts between asm-declared register variables and the clobber
2818      list are not allowed.  */
2819   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2820 
2821   if (overlap)
2822     {
2823       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2824 	     DECL_NAME (overlap));
2825 
2826       /* Reset registerness to stop multiple errors emitted for a single
2827 	 variable.  */
2828       DECL_REGISTER (overlap) = 0;
2829       return true;
2830     }
2831 
2832   return false;
2833 }
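/* For illustration (using a hypothetical register name): given
     register int x asm ("r12");
   an asm that uses X as an operand while also listing "r12" in its
   clobber list is rejected here, since the clobber would silently
   destroy the operand.  */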
2834 
2835 /* Generate RTL for an asm statement with arguments.
2836    STRING is the instruction template.
2837    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2838    Each output or input has an expression in the TREE_VALUE and
2839    a tree list in TREE_PURPOSE which in turn contains a constraint
2840    name in TREE_VALUE (or NULL_TREE) and a constraint string
2841    in TREE_PURPOSE.
2842    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2843    that is clobbered by this insn.
2844 
2845    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2846    should be the fallthru basic block of the asm goto.
2847 
2848    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2849    Some elements of OUTPUTS may be replaced with trees representing temporary
2850    values.  The caller should copy those temporary values to the originally
2851    specified lvalues.
2852 
2853    VOL nonzero means the insn is volatile; don't optimize it.  */
2854 
2855 static void
2856 expand_asm_stmt (gasm *stmt)
2857 {
2858   class save_input_location
2859   {
2860     location_t old;
2861 
2862   public:
2863     explicit save_input_location(location_t where)
2864     {
2865       old = input_location;
2866       input_location = where;
2867     }
2868 
2869     ~save_input_location()
2870     {
2871       input_location = old;
2872     }
2873   };
2874 
2875   location_t locus = gimple_location (stmt);
2876 
2877   if (gimple_asm_input_p (stmt))
2878     {
2879       const char *s = gimple_asm_string (stmt);
2880       tree string = build_string (strlen (s), s);
2881       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2882       return;
2883     }
2884 
2885   /* There are some legacy diagnostics in here, and this also avoids a
2886      sixth parameter to targetm.md_asm_adjust.  */
2887   save_input_location s_i_l(locus);
2888 
2889   unsigned noutputs = gimple_asm_noutputs (stmt);
2890   unsigned ninputs = gimple_asm_ninputs (stmt);
2891   unsigned nlabels = gimple_asm_nlabels (stmt);
2892   unsigned i;
2893 
2894   /* ??? Diagnose during gimplification?  */
2895   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2896     {
2897       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2898       return;
2899     }
2900 
2901   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2902   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2903   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2904 
2905   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2906 
2907   output_tvec.safe_grow (noutputs);
2908   input_tvec.safe_grow (ninputs);
2909   constraints.safe_grow (noutputs + ninputs);
2910 
2911   for (i = 0; i < noutputs; ++i)
2912     {
2913       tree t = gimple_asm_output_op (stmt, i);
2914       output_tvec[i] = TREE_VALUE (t);
2915       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2916     }
2917   for (i = 0; i < ninputs; i++)
2918     {
2919       tree t = gimple_asm_input_op (stmt, i);
2920       input_tvec[i] = TREE_VALUE (t);
2921       constraints[i + noutputs]
2922 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2923     }
2924 
2925   /* ??? Diagnose during gimplification?  */
2926   if (! check_operand_nalternatives (constraints))
2927     return;
2928 
2929   /* Count the number of meaningful clobbered registers, ignoring what
2930      we would ignore later.  */
2931   auto_vec<rtx> clobber_rvec;
2932   HARD_REG_SET clobbered_regs;
2933   CLEAR_HARD_REG_SET (clobbered_regs);
2934 
2935   if (unsigned n = gimple_asm_nclobbers (stmt))
2936     {
2937       clobber_rvec.reserve (n);
2938       for (i = 0; i < n; i++)
2939 	{
2940 	  tree t = gimple_asm_clobber_op (stmt, i);
2941           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2942 	  int nregs, j;
2943 
2944 	  j = decode_reg_name_and_count (regname, &nregs);
2945 	  if (j < 0)
2946 	    {
2947 	      if (j == -2)
2948 		{
2949 		  /* ??? Diagnose during gimplification?  */
2950 		  error ("unknown register name %qs in %<asm%>", regname);
2951 		}
2952 	      else if (j == -4)
2953 		{
2954 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2955 		  clobber_rvec.safe_push (x);
2956 		}
2957 	      else
2958 		{
2959 		  /* Otherwise we should have -1 == empty string
2960 		     or -3 == cc, which is not a register.  */
2961 		  gcc_assert (j == -1 || j == -3);
2962 		}
2963 	    }
2964 	  else
2965 	    for (int reg = j; reg < j + nregs; reg++)
2966 	      {
2967 		/* Clobbering the PIC register is an error.  */
2968 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2969 		  {
2970 		    /* ??? Diagnose during gimplification?  */
2971 		    error ("PIC register clobbered by %qs in %<asm%>",
2972 			   regname);
2973 		    return;
2974 		  }
2975 
2976 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2977 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2978 		clobber_rvec.safe_push (x);
2979 	      }
2980 	}
2981     }
2982   unsigned nclobbers = clobber_rvec.length();
2983 
2984   /* First pass over inputs and outputs checks validity and calls
2985      mark_addressable on operands if needed.  */
2986   /* ??? Diagnose during gimplification?  */
2987 
2988   for (i = 0; i < noutputs; ++i)
2989     {
2990       tree val = output_tvec[i];
2991       tree type = TREE_TYPE (val);
2992       const char *constraint;
2993       bool is_inout;
2994       bool allows_reg;
2995       bool allows_mem;
2996 
2997       /* Try to parse the output constraint.  If that fails, there's
2998 	 no point in going further.  */
2999       constraint = constraints[i];
3000       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3001 				    &allows_mem, &allows_reg, &is_inout))
3002 	return;
3003 
3004       if (! allows_reg
3005 	  && (allows_mem
3006 	      || is_inout
3007 	      || (DECL_P (val)
3008 		  && REG_P (DECL_RTL (val))
3009 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3010 	mark_addressable (val);
3011     }
3012 
3013   for (i = 0; i < ninputs; ++i)
3014     {
3015       bool allows_reg, allows_mem;
3016       const char *constraint;
3017 
3018       constraint = constraints[i + noutputs];
3019       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3020 				    constraints.address (),
3021 				    &allows_mem, &allows_reg))
3022 	return;
3023 
3024       if (! allows_reg && allows_mem)
3025 	mark_addressable (input_tvec[i]);
3026     }
3027 
3028   /* Second pass evaluates arguments.  */
3029 
3030   /* Make sure stack is consistent for asm goto.  */
3031   if (nlabels > 0)
3032     do_pending_stack_adjust ();
3033   int old_generating_concat_p = generating_concat_p;
3034 
3035   /* Vector of RTX's of evaluated output operands.  */
3036   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3037   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3038   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3039 
3040   output_rvec.safe_grow (noutputs);
3041 
3042   for (i = 0; i < noutputs; ++i)
3043     {
3044       tree val = output_tvec[i];
3045       tree type = TREE_TYPE (val);
3046       bool is_inout, allows_reg, allows_mem, ok;
3047       rtx op;
3048 
3049       ok = parse_output_constraint (&constraints[i], i, ninputs,
3050 				    noutputs, &allows_mem, &allows_reg,
3051 				    &is_inout);
3052       gcc_assert (ok);
3053 
3054       /* If an output operand is not a decl or indirect ref and our constraint
3055 	 allows a register, make a temporary to act as an intermediate.
3056 	 Make the asm insn write into that, then we will copy it to
3057 	 the real output operand.  Likewise for promoted variables.  */
3058 
3059       generating_concat_p = 0;
3060 
3061       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3062 	  || (DECL_P (val)
3063 	      && (allows_mem || REG_P (DECL_RTL (val)))
3064 	      && ! (REG_P (DECL_RTL (val))
3065 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3066 	  || ! allows_reg
3067 	  || is_inout
3068 	  || TREE_ADDRESSABLE (type))
3069 	{
3070 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3071 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3072 	  if (MEM_P (op))
3073 	    op = validize_mem (op);
3074 
3075 	  if (! allows_reg && !MEM_P (op))
3076 	    error ("output number %d not directly addressable", i);
3077 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3078 	      || GET_CODE (op) == CONCAT)
3079 	    {
3080 	      rtx old_op = op;
3081 	      op = gen_reg_rtx (GET_MODE (op));
3082 
3083 	      generating_concat_p = old_generating_concat_p;
3084 
3085 	      if (is_inout)
3086 		emit_move_insn (op, old_op);
3087 
3088 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3089 	      emit_move_insn (old_op, op);
3090 	      after_rtl_seq = get_insns ();
3091 	      after_rtl_end = get_last_insn ();
3092 	      end_sequence ();
3093 	    }
3094 	}
3095       else
3096 	{
3097 	  op = assign_temp (type, 0, 1);
3098 	  op = validize_mem (op);
3099 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3100 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3101 
3102 	  generating_concat_p = old_generating_concat_p;
3103 
3104 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3105 	  expand_assignment (val, make_tree (type, op), false);
3106 	  after_rtl_seq = get_insns ();
3107 	  after_rtl_end = get_last_insn ();
3108 	  end_sequence ();
3109 	}
3110       output_rvec[i] = op;
3111 
3112       if (is_inout)
3113 	inout_opnum.safe_push (i);
3114     }
3115 
3116   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3117   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3118 
3119   input_rvec.safe_grow (ninputs);
3120   input_mode.safe_grow (ninputs);
3121 
3122   generating_concat_p = 0;
3123 
3124   for (i = 0; i < ninputs; ++i)
3125     {
3126       tree val = input_tvec[i];
3127       tree type = TREE_TYPE (val);
3128       bool allows_reg, allows_mem, ok;
3129       const char *constraint;
3130       rtx op;
3131 
3132       constraint = constraints[i + noutputs];
3133       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3134 				   constraints.address (),
3135 				   &allows_mem, &allows_reg);
3136       gcc_assert (ok);
3137 
3138       /* EXPAND_INITIALIZER will not generate code for valid initializer
3139 	 constants, but will still generate code for other types of operand.
3140 	 This is the behavior we want for constant constraints.  */
3141       op = expand_expr (val, NULL_RTX, VOIDmode,
3142 			allows_reg ? EXPAND_NORMAL
3143 			: allows_mem ? EXPAND_MEMORY
3144 			: EXPAND_INITIALIZER);
3145 
3146       /* Never pass a CONCAT to an ASM.  */
3147       if (GET_CODE (op) == CONCAT)
3148 	op = force_reg (GET_MODE (op), op);
3149       else if (MEM_P (op))
3150 	op = validize_mem (op);
3151 
3152       if (asm_operand_ok (op, constraint, NULL) <= 0)
3153 	{
3154 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3155 	    op = force_reg (TYPE_MODE (type), op);
3156 	  else if (!allows_mem)
3157 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3158 		     i + noutputs);
3159 	  else if (MEM_P (op))
3160 	    {
3161 	      /* We won't recognize either volatile memory or memory
3162 		 with a queued address as a valid memory_operand
3163 		 at this point.  Ignore it: clearly this *is* a memory.  */
3164 	    }
3165 	  else
3166 	    gcc_unreachable ();
3167 	}
3168       input_rvec[i] = op;
3169       input_mode[i] = TYPE_MODE (type);
3170     }
3171 
3172   /* For in-out operands, copy output rtx to input rtx.  */
3173   unsigned ninout = inout_opnum.length();
3174   for (i = 0; i < ninout; i++)
3175     {
3176       int j = inout_opnum[i];
3177       rtx o = output_rvec[j];
3178 
3179       input_rvec.safe_push (o);
3180       input_mode.safe_push (GET_MODE (o));
3181 
3182       char buffer[16];
3183       sprintf (buffer, "%d", j);
3184       constraints.safe_push (ggc_strdup (buffer));
3185     }
3186   ninputs += ninout;
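  /* For example, for asm ("incl %0" : "+r" (x)) the "+r" operand was
     recorded above as output 0 plus an extra input whose constraint is
     the string "0", i.e. a matching constraint tying the input to the
     same location as the output.  */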
3187 
3188   /* Sometimes we wish to automatically clobber registers across an asm.
3189      Case in point is when the i386 backend moved from cc0 to a hard reg --
3190      maintaining source-level compatibility means automatically clobbering
3191      the flags register.  */
3192   rtx_insn *after_md_seq = NULL;
3193   if (targetm.md_asm_adjust)
3194     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3195 					  constraints, clobber_rvec,
3196 					  clobbered_regs);
3197 
3198   /* Do not allow the hook to change the output and input count,
3199      lest it mess up the operand numbering.  */
3200   gcc_assert (output_rvec.length() == noutputs);
3201   gcc_assert (input_rvec.length() == ninputs);
3202   gcc_assert (constraints.length() == noutputs + ninputs);
3203 
3204   /* But it certainly can adjust the clobbers.  */
3205   nclobbers = clobber_rvec.length();
3206 
3207   /* Third pass checks for easy conflicts.  */
3208   /* ??? Why are we doing this on trees instead of rtx.  */
3209 
3210   bool clobber_conflict_found = 0;
3211   for (i = 0; i < noutputs; ++i)
3212     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3213 	clobber_conflict_found = 1;
3214   for (i = 0; i < ninputs - ninout; ++i)
3215     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3216 	clobber_conflict_found = 1;
3217 
3218   /* Make vectors for the expression-rtx, constraint strings,
3219      and named operands.  */
3220 
3221   rtvec argvec = rtvec_alloc (ninputs);
3222   rtvec constraintvec = rtvec_alloc (ninputs);
3223   rtvec labelvec = rtvec_alloc (nlabels);
3224 
3225   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3226 				    : GET_MODE (output_rvec[0])),
3227 				   ggc_strdup (gimple_asm_string (stmt)),
3228 				   "", 0, argvec, constraintvec,
3229 				   labelvec, locus);
3230   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3231 
3232   for (i = 0; i < ninputs; ++i)
3233     {
3234       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3235       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3236 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3237 				 constraints[i + noutputs],
3238 				 locus);
3239     }
3240 
3241   /* Copy labels to the vector.  */
3242   rtx_code_label *fallthru_label = NULL;
3243   if (nlabels > 0)
3244     {
3245       basic_block fallthru_bb = NULL;
3246       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3247       if (fallthru)
3248 	fallthru_bb = fallthru->dest;
3249 
3250       for (i = 0; i < nlabels; ++i)
3251 	{
3252 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3253 	  rtx_insn *r;
3254 	  /* If asm goto has any labels in the fallthru basic block, use
3255 	     a label that we emit immediately after the asm goto.  Expansion
3256 	     may insert further instructions into the same basic block after
3257 	     the asm goto, and if we don't do this, insertion of instructions on
3258 	     the fallthru edge might misbehave.  See PR58670.  */
3259 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3260 	    {
3261 	      if (fallthru_label == NULL_RTX)
3262 	        fallthru_label = gen_label_rtx ();
3263 	      r = fallthru_label;
3264 	    }
3265 	  else
3266 	    r = label_rtx (label);
3267 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3268 	}
3269     }
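  /* Example for the PR58670 case handled above (illustrative only):

	 asm goto ("..." : : : : out);
       out:
	 ...;

     Here `out' lives in the fallthru basic block, so instead of using
     label_rtx (out) directly we emit a fresh fallthru_label right after the
     asm and make the LABEL_REF point at it, keeping the asm's jump target
     distinct from the start of the fallthru block so that later insertions
     on the fallthru edge stay well behaved.  */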
3270 
3271   /* Now, for each output, construct an rtx
3272      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3273 			       ARGVEC CONSTRAINTS OPNAMES))
3274      If there is more than one, put them inside a PARALLEL.  */
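  /* Rough shape of the RTL built below for, say, two register outputs and
     one register clobber (illustrative; modes and operand details omitted):

	 (parallel [(set (reg A) (asm_operands ... 0 ...))
		    (set (reg B) (asm_operands ... 1 ...))
		    (clobber (reg FLAGS))])

     With a single output and no clobbers it is just the plain
     (set (reg A) (asm_operands ...)).  */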
3275 
3276   if (nlabels > 0 && nclobbers == 0)
3277     {
3278       gcc_assert (noutputs == 0);
3279       emit_jump_insn (body);
3280     }
3281   else if (noutputs == 0 && nclobbers == 0)
3282     {
3283       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3284       emit_insn (body);
3285     }
3286   else if (noutputs == 1 && nclobbers == 0)
3287     {
3288       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3289       emit_insn (gen_rtx_SET (output_rvec[0], body));
3290     }
3291   else
3292     {
3293       rtx obody = body;
3294       int num = noutputs;
3295 
3296       if (num == 0)
3297 	num = 1;
3298 
3299       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3300 
3301       /* For each output operand, store a SET.  */
3302       for (i = 0; i < noutputs; ++i)
3303 	{
3304 	  rtx src, o = output_rvec[i];
3305 	  if (i == 0)
3306 	    {
3307 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3308 	      src = obody;
3309 	    }
3310 	  else
3311 	    {
3312 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3313 					  ASM_OPERANDS_TEMPLATE (obody),
3314 					  constraints[i], i, argvec,
3315 					  constraintvec, labelvec, locus);
3316 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3317 	    }
3318 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3319 	}
3320 
3321       /* If there are no outputs (but there are some clobbers)
3322 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3323       if (i == 0)
3324 	XVECEXP (body, 0, i++) = obody;
3325 
3326       /* Store (clobber REG) for each clobbered register specified.  */
3327       for (unsigned j = 0; j < nclobbers; ++j)
3328 	{
3329 	  rtx clobbered_reg = clobber_rvec[j];
3330 
3331 	  /* Do a sanity check for overlap between clobbers and, respectively,
3332 	     inputs and outputs that hasn't been handled.  Such overlap
3333 	     should have been detected and reported above.  */
3334 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3335 	    {
3336 	      /* We test the old body (obody) contents to avoid
3337 		 tripping over the under-construction body.  */
3338 	      for (unsigned k = 0; k < noutputs; ++k)
3339 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3340 		  internal_error ("asm clobber conflict with output operand");
3341 
3342 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3343 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3344 		  internal_error ("asm clobber conflict with input operand");
3345 	    }
3346 
3347 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3348 	}
3349 
3350       if (nlabels > 0)
3351 	emit_jump_insn (body);
3352       else
3353 	emit_insn (body);
3354     }
3355 
3356   generating_concat_p = old_generating_concat_p;
3357 
3358   if (fallthru_label)
3359     emit_label (fallthru_label);
3360 
3361   if (after_md_seq)
3362     emit_insn (after_md_seq);
3363   if (after_rtl_seq)
3364     emit_insn (after_rtl_seq);
3365 
3366   free_temp_slots ();
3367   crtl->has_asm_statement = 1;
3368 }
3369 
3370 /* Emit code to jump to the address
3371    specified by the pointer expression EXP.  */
3372 
3373 static void
3374 expand_computed_goto (tree exp)
3375 {
3376   rtx x = expand_normal (exp);
3377 
3378   do_pending_stack_adjust ();
3379   emit_indirect_jump (x);
3380 }
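/* Example (GNU C "labels as values" extension) of source that reaches
   expand_computed_goto, since the destination is a pointer expression
   rather than a LABEL_DECL:

       static void *tab[] = { &&l1, &&l2 };
       goto *tab[i];

   This expands to an indirect jump on the value of the expression.  */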
3381 
3382 /* Generate RTL code for a `goto' statement with target label LABEL.
3383    LABEL should be a LABEL_DECL tree node that was or will later be
3384    defined with `expand_label'.  */
3385 
3386 static void
3387 expand_goto (tree label)
3388 {
3389   if (flag_checking)
3390     {
3391       /* Check for a nonlocal goto to a containing function.  Should have
3392 	 gotten translated to __builtin_nonlocal_goto.  */
3393       tree context = decl_function_context (label);
3394       gcc_assert (!context || context == current_function_decl);
3395     }
3396 
3397   emit_jump (jump_target_rtx (label));
3398 }
3399 
3400 /* Output a return with no value.  */
3401 
3402 static void
3403 expand_null_return_1 (void)
3404 {
3405   clear_pending_stack_adjust ();
3406   do_pending_stack_adjust ();
3407   emit_jump (return_label);
3408 }
3409 
3410 /* Generate RTL to return from the current function, with no value.
3411    (That is, we do not do anything about returning any value.)  */
3412 
3413 void
3414 expand_null_return (void)
3415 {
3416   /* If this function was declared to return a value, but we
3417      didn't, clobber the return registers so that they are not
3418      propagated live to the rest of the function.  */
3419   clobber_return_register ();
3420 
3421   expand_null_return_1 ();
3422 }
3423 
3424 /* Generate RTL to return from the current function, with value VAL.  */
3425 
3426 static void
3427 expand_value_return (rtx val)
3428 {
3429   /* Copy the value to the return location unless it's already there.  */
3430 
3431   tree decl = DECL_RESULT (current_function_decl);
3432   rtx return_reg = DECL_RTL (decl);
3433   if (return_reg != val)
3434     {
3435       tree funtype = TREE_TYPE (current_function_decl);
3436       tree type = TREE_TYPE (decl);
3437       int unsignedp = TYPE_UNSIGNED (type);
3438       machine_mode old_mode = DECL_MODE (decl);
3439       machine_mode mode;
3440       if (DECL_BY_REFERENCE (decl))
3441         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3442       else
3443         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3444 
3445       if (mode != old_mode)
3446 	val = convert_modes (mode, old_mode, val, unsignedp);
3447 
3448       if (GET_CODE (return_reg) == PARALLEL)
3449 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3450       else
3451 	emit_move_insn (return_reg, val);
3452     }
3453 
3454   expand_null_return_1 ();
3455 }
3456 
3457 /* Generate RTL to evaluate the expression RETVAL and return it
3458    from the current function.  */
3459 
3460 static void
3461 expand_return (tree retval, tree bounds)
3462 {
3463   rtx result_rtl;
3464   rtx val = 0;
3465   tree retval_rhs;
3466   rtx bounds_rtl;
3467 
3468   /* If function wants no value, give it none.  */
3469   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3470     {
3471       expand_normal (retval);
3472       expand_null_return ();
3473       return;
3474     }
3475 
3476   if (retval == error_mark_node)
3477     {
3478       /* Treat this like a return of no value from a function that
3479 	 returns a value.  */
3480       expand_null_return ();
3481       return;
3482     }
3483   else if ((TREE_CODE (retval) == MODIFY_EXPR
3484 	    || TREE_CODE (retval) == INIT_EXPR)
3485 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3486     retval_rhs = TREE_OPERAND (retval, 1);
3487   else
3488     retval_rhs = retval;
3489 
3490   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3491 
3492   /* Put returned bounds to the right place.  */
3493   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3494   if (bounds_rtl)
3495     {
3496       rtx addr = NULL;
3497       rtx bnd = NULL;
3498 
3499       if (bounds && bounds != error_mark_node)
3500 	{
3501 	  bnd = expand_normal (bounds);
3502 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3503 	}
3504       else if (REG_P (bounds_rtl))
3505 	{
3506 	  if (bounds)
3507 	    bnd = chkp_expand_zero_bounds ();
3508 	  else
3509 	    {
3510 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3511 	      addr = gen_rtx_MEM (Pmode, addr);
3512 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3513 	    }
3514 
3515 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3516 	}
3517       else
3518 	{
3519 	  int n;
3520 
3521 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3522 
3523 	  if (bounds)
3524 	    bnd = chkp_expand_zero_bounds ();
3525 	  else
3526 	    {
3527 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3528 	      addr = gen_rtx_MEM (Pmode, addr);
3529 	    }
3530 
3531 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3532 	    {
3533 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3534 	      if (!bounds)
3535 		{
3536 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3537 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3538 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3539 		}
3540 	      targetm.calls.store_returned_bounds (slot, bnd);
3541 	    }
3542 	}
3543     }
3544   else if (chkp_function_instrumented_p (current_function_decl)
3545 	   && !BOUNDED_P (retval_rhs)
3546 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3547 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3548     {
3549       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3550       addr = gen_rtx_MEM (Pmode, addr);
3551 
3552       gcc_assert (MEM_P (result_rtl));
3553 
3554       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3555     }
3556 
3557   /* If we are returning the RESULT_DECL, then the value has already
3558      been stored into it, so we don't have to do anything special.  */
3559   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3560     expand_value_return (result_rtl);
3561 
3562   /* If the result is an aggregate that is being returned in one (or more)
3563      registers, load the registers here.  */
3564 
3565   else if (retval_rhs != 0
3566 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3567 	   && REG_P (result_rtl))
3568     {
3569       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3570       if (val)
3571 	{
3572 	  /* Use the mode of the result value on the return register.  */
3573 	  PUT_MODE (result_rtl, GET_MODE (val));
3574 	  expand_value_return (val);
3575 	}
3576       else
3577 	expand_null_return ();
3578     }
3579   else if (retval_rhs != 0
3580 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3581 	   && (REG_P (result_rtl)
3582 	       || (GET_CODE (result_rtl) == PARALLEL)))
3583     {
3584       /* Compute the return value into a temporary (usually a pseudo reg).  */
3585       val
3586 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3587       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3588       val = force_not_mem (val);
3589       expand_value_return (val);
3590     }
3591   else
3592     {
3593       /* No hard reg used; calculate value into hard return reg.  */
3594       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3595       expand_value_return (result_rtl);
3596     }
3597 }
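/* Example for the BLKmode-in-a-register case above (illustrative; the exact
   registers are ABI dependent): a small aggregate such as

       struct s { short a, b, c; };
       struct s f (void) { ... }

   has TYPE_MODE BLKmode, yet many ABIs return it in an integer register.
   copy_blkmode_to_reg assembles the bytes into a pseudo of the return
   register's mode, and expand_value_return then moves that pseudo into the
   hard return register.  */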
3598 
3599 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3600    STMT that doesn't require special handling for outgoing edges.  That
3601    is, no tailcalls and no GIMPLE_COND.  */
3602 
3603 static void
3604 expand_gimple_stmt_1 (gimple *stmt)
3605 {
3606   tree op0;
3607 
3608   set_curr_insn_location (gimple_location (stmt));
3609 
3610   switch (gimple_code (stmt))
3611     {
3612     case GIMPLE_GOTO:
3613       op0 = gimple_goto_dest (stmt);
3614       if (TREE_CODE (op0) == LABEL_DECL)
3615 	expand_goto (op0);
3616       else
3617 	expand_computed_goto (op0);
3618       break;
3619     case GIMPLE_LABEL:
3620       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3621       break;
3622     case GIMPLE_NOP:
3623     case GIMPLE_PREDICT:
3624       break;
3625     case GIMPLE_SWITCH:
3626       {
3627 	gswitch *swtch = as_a <gswitch *> (stmt);
3628 	if (gimple_switch_num_labels (swtch) == 1)
3629 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3630 	else
3631 	  expand_case (swtch);
3632       }
3633       break;
3634     case GIMPLE_ASM:
3635       expand_asm_stmt (as_a <gasm *> (stmt));
3636       break;
3637     case GIMPLE_CALL:
3638       expand_call_stmt (as_a <gcall *> (stmt));
3639       break;
3640 
3641     case GIMPLE_RETURN:
3642       {
3643 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3644 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3645 
3646 	if (op0 && op0 != error_mark_node)
3647 	  {
3648 	    tree result = DECL_RESULT (current_function_decl);
3649 
3650 	    /* Mark that we have a return statement with missing bounds.  */
3651 	    if (!bnd
3652 		&& chkp_function_instrumented_p (cfun->decl)
3653 		&& !DECL_P (op0))
3654 	      bnd = error_mark_node;
3655 
3656 	    /* If we are not returning the current function's RESULT_DECL,
3657 	       build an assignment to it.  */
3658 	    if (op0 != result)
3659 	      {
3660 		/* I believe that a function's RESULT_DECL is unique.  */
3661 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3662 
3663 		/* ??? We'd like to use simply expand_assignment here,
3664 		   but this fails if the value is of BLKmode but the return
3665 		   decl is a register.  expand_return has special handling
3666 		   for this combination, which eventually should move
3667 		   to common code.  See comments there.  Until then, let's
3668 		   build a modify expression :-/  */
3669 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3670 			      result, op0);
3671 	      }
3672 	  }
3673 
3674 	if (!op0)
3675 	  expand_null_return ();
3676 	else
3677 	  expand_return (op0, bnd);
3678       }
3679       break;
3680 
3681     case GIMPLE_ASSIGN:
3682       {
3683 	gassign *assign_stmt = as_a <gassign *> (stmt);
3684 	tree lhs = gimple_assign_lhs (assign_stmt);
3685 
3686 	/* Tree expand used to fiddle with |= and &= of two bitfield
3687 	   COMPONENT_REFs here.  This can't happen with gimple: the LHS
3688 	   of binary assigns must be a gimple reg.  */
3689 
3690 	if (TREE_CODE (lhs) != SSA_NAME
3691 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3692 	       == GIMPLE_SINGLE_RHS)
3693 	  {
3694 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3695 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3696 			== GIMPLE_SINGLE_RHS);
3697 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3698 		/* Do not put locations on possibly shared trees.  */
3699 		&& !is_gimple_min_invariant (rhs))
3700 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3701 	    if (TREE_CLOBBER_P (rhs))
3702 	      /* This is a clobber marking that this LHS is going out
3703 		 of scope.  */
3704 	      ;
3705 	    else
3706 	      expand_assignment (lhs, rhs,
3707 				 gimple_assign_nontemporal_move_p (
3708 				   assign_stmt));
3709 	  }
3710 	else
3711 	  {
3712 	    rtx target, temp;
3713 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3714 	    struct separate_ops ops;
3715 	    bool promoted = false;
3716 
3717 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3718 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3719 	      promoted = true;
3720 
3721 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3722 	    ops.type = TREE_TYPE (lhs);
3723 	    switch (get_gimple_rhs_class (ops.code))
3724 	      {
3725 		case GIMPLE_TERNARY_RHS:
3726 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3727 		  /* Fallthru */
3728 		case GIMPLE_BINARY_RHS:
3729 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3730 		  /* Fallthru */
3731 		case GIMPLE_UNARY_RHS:
3732 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3733 		  break;
3734 		default:
3735 		  gcc_unreachable ();
3736 	      }
3737 	    ops.location = gimple_location (stmt);
3738 
3739 	    /* If we want to use a nontemporal store, force the value into
3740 	       a register first.  If we store into a promoted register,
3741 	       don't directly expand to target.  */
3742 	    temp = nontemporal || promoted ? NULL_RTX : target;
3743 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3744 				       EXPAND_NORMAL);
3745 
3746 	    if (temp == target)
3747 	      ;
3748 	    else if (promoted)
3749 	      {
3750 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3751 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3752 		   sure that we properly convert it.  */
3753 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3754 		  {
3755 		    temp = convert_modes (GET_MODE (target),
3756 					  TYPE_MODE (ops.type),
3757 					  temp, unsignedp);
3758 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3759 					  GET_MODE (target), temp, unsignedp);
3760 		  }
3761 
3762 		convert_move (SUBREG_REG (target), temp, unsignedp);
3763 	      }
3764 	    else if (nontemporal && emit_storent_insn (target, temp))
3765 	      ;
3766 	    else
3767 	      {
3768 		temp = force_operand (temp, target);
3769 		if (temp != target)
3770 		  emit_move_insn (target, temp);
3771 	      }
3772 	  }
3773       }
3774       break;
3775 
3776     default:
3777       gcc_unreachable ();
3778     }
3779 }
3780 
3781 /* Expand one gimple statement STMT and return the last RTL instruction
3782    before any of the newly generated ones.
3783 
3784    In addition to generating the necessary RTL instructions this also
3785    sets REG_EH_REGION notes if necessary and sets the current source
3786    location for diagnostics.  */
3787 
3788 static rtx_insn *
3789 expand_gimple_stmt (gimple *stmt)
3790 {
3791   location_t saved_location = input_location;
3792   rtx_insn *last = get_last_insn ();
3793   int lp_nr;
3794 
3795   gcc_assert (cfun);
3796 
3797   /* We need to save and restore the current source location so that errors
3798      discovered during expansion are emitted with the right location.  But
3799      it would be better if the diagnostic routines used the source location
3800      embedded in the tree nodes rather than globals.  */
3801   if (gimple_has_location (stmt))
3802     input_location = gimple_location (stmt);
3803 
3804   expand_gimple_stmt_1 (stmt);
3805 
3806   /* Free any temporaries used to evaluate this statement.  */
3807   free_temp_slots ();
3808 
3809   input_location = saved_location;
3810 
3811   /* Mark all insns that may trap.  */
3812   lp_nr = lookup_stmt_eh_lp (stmt);
3813   if (lp_nr)
3814     {
3815       rtx_insn *insn;
3816       for (insn = next_real_insn (last); insn;
3817 	   insn = next_real_insn (insn))
3818 	{
3819 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3820 	      /* If we want exceptions for non-call insns, any
3821 		 may_trap_p instruction may throw.  */
3822 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3823 	      && GET_CODE (PATTERN (insn)) != USE
3824 	      && insn_could_throw_p (insn))
3825 	    make_reg_eh_region_note (insn, 0, lp_nr);
3826 	}
3827     }
3828 
3829   return last;
3830 }
3831 
3832 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3833    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3834    generated a tail call (something that might be denied by the ABI
3835    rules governing the call; see calls.c).
3836 
3837    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3838    can still reach the rest of BB.  The case here is __builtin_sqrt,
3839    where the NaN result goes through the external function (with a
3840    tailcall) and the normal result happens via a sqrt instruction.  */
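/* Roughly (illustrative, target dependent): for

       double f (double x) { return sqrt (x); }

   some targets expand sqrt to a hardware instruction for the common case
   and emit a (conditional) sibcall to the library sqrt for the errno/NaN
   path.  The sibcall ends the block, but the label that starts the in-line
   path is still reachable, hence CAN_FALLTHRU.  */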
3841 
3842 static basic_block
3843 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3844 {
3845   rtx_insn *last2, *last;
3846   edge e;
3847   edge_iterator ei;
3848   profile_probability probability;
3849 
3850   last2 = last = expand_gimple_stmt (stmt);
3851 
3852   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3853     if (CALL_P (last) && SIBLING_CALL_P (last))
3854       goto found;
3855 
3856   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3857 
3858   *can_fallthru = true;
3859   return NULL;
3860 
3861  found:
3862   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3863      Any instructions emitted here are about to be deleted.  */
3864   do_pending_stack_adjust ();
3865 
3866   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3867   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3868      EH or abnormal edges, we shouldn't have created a tail call in
3869      the first place.  So it seems to me we should just be removing
3870      all edges here, or redirecting the existing fallthru edge to
3871      the exit block.  */
3872 
3873   probability = profile_probability::never ();
3874 
3875   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3876     {
3877       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3878 	{
3879 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3880 	    e->dest->count -= e->count ();
3881 	  probability += e->probability;
3882 	  remove_edge (e);
3883 	}
3884       else
3885 	ei_next (&ei);
3886     }
3887 
3888   /* This is somewhat ugly: the call_expr expander often emits instructions
3889      after the sibcall (to perform the function return).  These confuse the
3890      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3891   last = NEXT_INSN (last);
3892   gcc_assert (BARRIER_P (last));
3893 
3894   *can_fallthru = false;
3895   while (NEXT_INSN (last))
3896     {
3897       /* For instance, the sqrt builtin expander expands an if with a
3898 	 sibcall in the `then` arm and a label for the `else` arm.  */
3899       if (LABEL_P (NEXT_INSN (last)))
3900 	{
3901 	  *can_fallthru = true;
3902 	  break;
3903 	}
3904       delete_insn (NEXT_INSN (last));
3905     }
3906 
3907   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3908 		 | EDGE_SIBCALL);
3909   e->probability = probability;
3910   BB_END (bb) = last;
3911   update_bb_for_insn (bb);
3912 
3913   if (NEXT_INSN (last))
3914     {
3915       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3916 
3917       last = BB_END (bb);
3918       if (BARRIER_P (last))
3919 	BB_END (bb) = PREV_INSN (last);
3920     }
3921 
3922   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3923 
3924   return bb;
3925 }
3926 
3927 /* Return the difference between the floor and the truncated result of
3928    a signed division by OP1 with remainder MOD.  */
3929 static rtx
3930 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3931 {
3932   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3933   return gen_rtx_IF_THEN_ELSE
3934     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3935      gen_rtx_IF_THEN_ELSE
3936      (mode, gen_rtx_LT (BImode,
3937 			gen_rtx_DIV (mode, op1, mod),
3938 			const0_rtx),
3939       constm1_rtx, const0_rtx),
3940      const0_rtx);
3941 }
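/* Worked example (added for illustration): -7 / 2 truncates to -3 with
   MOD = -1.  MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so the adjustment
   is -1 and -3 + -1 = -4 = floor (-3.5).  When the remainder is zero or
   the dividend and OP1 have the same sign, the adjustment is 0.  */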
3942 
3943 /* Return the difference between the ceil and the truncated result of
3944    a signed division by OP1 with remainder MOD.  */
3945 static rtx
3946 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3947 {
3948   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3949   return gen_rtx_IF_THEN_ELSE
3950     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3951      gen_rtx_IF_THEN_ELSE
3952      (mode, gen_rtx_GT (BImode,
3953 			gen_rtx_DIV (mode, op1, mod),
3954 			const0_rtx),
3955       const1_rtx, const0_rtx),
3956      const0_rtx);
3957 }
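/* Worked example (added for illustration): 7 / 2 truncates to 3 with
   MOD = 1.  MOD != 0 and OP1 / MOD = 2 > 0, so the adjustment is +1 and
   3 + 1 = 4 = ceil (3.5).  For -7 / 2 the remainder is -1, OP1 / MOD < 0,
   and the truncated result -3 is already the ceiling.  */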
3958 
3959 /* Return the difference between the ceil and the truncated result of
3960    an unsigned division by OP1 with remainder MOD.  */
3961 static rtx
3962 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3963 {
3964   /* (mod != 0 ? 1 : 0) */
3965   return gen_rtx_IF_THEN_ELSE
3966     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3967      const1_rtx, const0_rtx);
3968 }
3969 
3970 /* Return the difference between the rounded and the truncated result
3971    of a signed division by OP1 with remainder MOD.  Halfway cases are
3972    rounded away from zero, rather than to the nearest even number.  */
3973 static rtx
3974 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3975 {
3976   /* (abs (mod) >= abs (op1) - abs (mod)
3977       ? (op1 / mod > 0 ? 1 : -1)
3978       : 0) */
3979   return gen_rtx_IF_THEN_ELSE
3980     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3981 		       gen_rtx_MINUS (mode,
3982 				      gen_rtx_ABS (mode, op1),
3983 				      gen_rtx_ABS (mode, mod))),
3984      gen_rtx_IF_THEN_ELSE
3985      (mode, gen_rtx_GT (BImode,
3986 			gen_rtx_DIV (mode, op1, mod),
3987 			const0_rtx),
3988       const1_rtx, constm1_rtx),
3989      const0_rtx);
3990 }
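/* Worked example (added for illustration): 7 / 2 truncates to 3 with
   MOD = 1; |MOD| = 1 >= |OP1| - |MOD| = 1, and OP1 / MOD = 2 > 0, so the
   adjustment is +1 and the rounded result is 4 (3.5 rounds away from
   zero).  For -7 / 2, MOD = -1 and OP1 / MOD < 0, giving -3 + -1 = -4.  */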
3991 
3992 /* Return the difference between the rounded and the truncated result
3993    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3994    are rounded away from zero, rather than to the nearest even
3995    number.  */
3996 static rtx
3997 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3998 {
3999   /* (mod >= op1 - mod ? 1 : 0) */
4000   return gen_rtx_IF_THEN_ELSE
4001     (mode, gen_rtx_GE (BImode, mod,
4002 		       gen_rtx_MINUS (mode, op1, mod)),
4003      const1_rtx, const0_rtx);
4004 }
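/* Worked example (added for illustration): 7u / 2u truncates to 3 with
   MOD = 1; MOD >= OP1 - MOD (1 >= 1), so the adjustment is 1 and the
   rounded result is 4.  For 7u / 3u, MOD = 1 < 3 - 1, so the truncated
   quotient 2 is already the rounded result.  */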
4005 
4006 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4007    any rtl.  */
4008 
4009 static rtx
4010 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4011 			      addr_space_t as)
4012 {
4013 #ifndef POINTERS_EXTEND_UNSIGNED
4014   gcc_assert (mode == Pmode
4015 	      || mode == targetm.addr_space.address_mode (as));
4016   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4017 #else
4018   rtx temp;
4019 
4020   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4021 
4022   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4023     return x;
4024 
4025   /* X must have some form of address mode already.  */
4026   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4027   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4028     x = lowpart_subreg (mode, x, xmode);
4029   else if (POINTERS_EXTEND_UNSIGNED > 0)
4030     x = gen_rtx_ZERO_EXTEND (mode, x);
4031   else if (!POINTERS_EXTEND_UNSIGNED)
4032     x = gen_rtx_SIGN_EXTEND (mode, x);
4033   else
4034     {
4035       switch (GET_CODE (x))
4036 	{
4037 	case SUBREG:
4038 	  if ((SUBREG_PROMOTED_VAR_P (x)
4039 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4040 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4041 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4042 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4043 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4044 	      && GET_MODE (SUBREG_REG (x)) == mode)
4045 	    return SUBREG_REG (x);
4046 	  break;
4047 	case LABEL_REF:
4048 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4049 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4050 	  return temp;
4051 	case SYMBOL_REF:
4052 	  temp = shallow_copy_rtx (x);
4053 	  PUT_MODE (temp, mode);
4054 	  return temp;
4055 	case CONST:
4056 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4057 	  if (temp)
4058 	    temp = gen_rtx_CONST (mode, temp);
4059 	  return temp;
4060 	case PLUS:
4061 	case MINUS:
4062 	  if (CONST_INT_P (XEXP (x, 1)))
4063 	    {
4064 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4065 	      if (temp)
4066 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4067 	    }
4068 	  break;
4069 	default:
4070 	  break;
4071 	}
4072       /* Don't know how to express ptr_extend as an operation in debug info.  */
4073       return NULL;
4074     }
4075 #endif /* POINTERS_EXTEND_UNSIGNED */
4076 
4077   return x;
4078 }
4079 
4080 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4081    by avoid_deep_ter_for_debug.  */
4082 
4083 static hash_map<tree, tree> *deep_ter_debug_map;
4084 
4085 /* Split too deep TER chains for debug stmts using debug temporaries.  */
4086 
4087 static void
4088 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4089 {
4090   use_operand_p use_p;
4091   ssa_op_iter iter;
4092   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4093     {
4094       tree use = USE_FROM_PTR (use_p);
4095       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4096 	continue;
4097       gimple *g = get_gimple_for_ssa_name (use);
4098       if (g == NULL)
4099 	continue;
4100       if (depth > 6 && !stmt_ends_bb_p (g))
4101 	{
4102 	  if (deep_ter_debug_map == NULL)
4103 	    deep_ter_debug_map = new hash_map<tree, tree>;
4104 
4105 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4106 	  if (vexpr != NULL)
4107 	    continue;
4108 	  vexpr = make_node (DEBUG_EXPR_DECL);
4109 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4110 	  DECL_ARTIFICIAL (vexpr) = 1;
4111 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4112 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4113 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4114 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4115 	  avoid_deep_ter_for_debug (def_temp, 0);
4116 	}
4117       else
4118 	avoid_deep_ter_for_debug (g, depth + 1);
4119     }
4120 }
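/* Illustrative sketch (not from the original source): with a single-use
   chain such as

       a_1 = x_0 + 1;
       a_2 = a_1 + 1;
       ...
       # DEBUG d => a_9

   TER would substitute the whole chain into the debug bind.  Once the chain
   gets deeper than 6, the code above instead emits a debug temporary

       a_7 = a_6 + 1;
       # DEBUG D#1 => a_7

   right after the offending definition and records a_7 -> D#1 in
   deep_ter_debug_map, so later debug expansion can stop at D#1 instead of
   recursing through the whole chain.  */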
4121 
4122 /* Return an RTX equivalent to the value of the parameter DECL.  */
4123 
4124 static rtx
4125 expand_debug_parm_decl (tree decl)
4126 {
4127   rtx incoming = DECL_INCOMING_RTL (decl);
4128 
4129   if (incoming
4130       && GET_MODE (incoming) != BLKmode
4131       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4132 	  || (MEM_P (incoming)
4133 	      && REG_P (XEXP (incoming, 0))
4134 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4135     {
4136       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4137 
4138 #ifdef HAVE_window_save
4139       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4140 	 If the target machine has an explicit window save instruction, the
4141 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4142       if (REG_P (incoming)
4143 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4144 	incoming
4145 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4146 				OUTGOING_REGNO (REGNO (incoming)), 0);
4147       else if (MEM_P (incoming))
4148 	{
4149 	  rtx reg = XEXP (incoming, 0);
4150 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4151 	    {
4152 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4153 	      incoming = replace_equiv_address_nv (incoming, reg);
4154 	    }
4155 	  else
4156 	    incoming = copy_rtx (incoming);
4157 	}
4158 #endif
4159 
4160       ENTRY_VALUE_EXP (rtl) = incoming;
4161       return rtl;
4162     }
4163 
4164   if (incoming
4165       && GET_MODE (incoming) != BLKmode
4166       && !TREE_ADDRESSABLE (decl)
4167       && MEM_P (incoming)
4168       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4169 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4170 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4171 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4172     return copy_rtx (incoming);
4173 
4174   return NULL_RTX;
4175 }
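/* Example (illustrative; registers are ABI dependent): for

       int f (int x) { ... }

   where X arrives in a hard register, DECL_INCOMING_RTL is that register
   and the function above returns (entry_value (reg x)), which the debug
   back end can represent as a DWARF entry-value operation, i.e. the value
   the register had on entry to the function, even if it has since been
   clobbered.  */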
4176 
4177 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4178 
4179 static rtx
4180 expand_debug_expr (tree exp)
4181 {
4182   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4183   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4184   machine_mode inner_mode = VOIDmode;
4185   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4186   addr_space_t as;
4187   scalar_int_mode op0_mode, op1_mode, addr_mode;
4188 
4189   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4190     {
4191     case tcc_expression:
4192       switch (TREE_CODE (exp))
4193 	{
4194 	case COND_EXPR:
4195 	case DOT_PROD_EXPR:
4196 	case SAD_EXPR:
4197 	case WIDEN_MULT_PLUS_EXPR:
4198 	case WIDEN_MULT_MINUS_EXPR:
4199 	case FMA_EXPR:
4200 	  goto ternary;
4201 
4202 	case TRUTH_ANDIF_EXPR:
4203 	case TRUTH_ORIF_EXPR:
4204 	case TRUTH_AND_EXPR:
4205 	case TRUTH_OR_EXPR:
4206 	case TRUTH_XOR_EXPR:
4207 	  goto binary;
4208 
4209 	case TRUTH_NOT_EXPR:
4210 	  goto unary;
4211 
4212 	default:
4213 	  break;
4214 	}
4215       break;
4216 
4217     ternary:
4218       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4219       if (!op2)
4220 	return NULL_RTX;
4221       /* Fall through.  */
4222 
4223     binary:
4224     case tcc_binary:
4225       if (mode == BLKmode)
4226 	return NULL_RTX;
4227       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4228       if (!op1)
4229 	return NULL_RTX;
4230       switch (TREE_CODE (exp))
4231 	{
4232 	case LSHIFT_EXPR:
4233 	case RSHIFT_EXPR:
4234 	case LROTATE_EXPR:
4235 	case RROTATE_EXPR:
4236 	case WIDEN_LSHIFT_EXPR:
4237 	  /* Ensure second operand isn't wider than the first one.  */
4238 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4239 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4240 	      && (GET_MODE_UNIT_PRECISION (mode)
4241 		  < GET_MODE_PRECISION (op1_mode)))
4242 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4243 	  break;
4244 	default:
4245 	  break;
4246 	}
4247       /* Fall through.  */
4248 
4249     unary:
4250     case tcc_unary:
4251       if (mode == BLKmode)
4252 	return NULL_RTX;
4253       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4254       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4255       if (!op0)
4256 	return NULL_RTX;
4257       break;
4258 
4259     case tcc_comparison:
4260       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4261       goto binary;
4262 
4263     case tcc_type:
4264     case tcc_statement:
4265       gcc_unreachable ();
4266 
4267     case tcc_constant:
4268     case tcc_exceptional:
4269     case tcc_declaration:
4270     case tcc_reference:
4271     case tcc_vl_exp:
4272       break;
4273     }
4274 
4275   switch (TREE_CODE (exp))
4276     {
4277     case STRING_CST:
4278       if (!lookup_constant_def (exp))
4279 	{
4280 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4281 	      != (size_t) TREE_STRING_LENGTH (exp))
4282 	    return NULL_RTX;
4283 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4284 	  op0 = gen_rtx_MEM (BLKmode, op0);
4285 	  set_mem_attributes (op0, exp, 0);
4286 	  return op0;
4287 	}
4288       /* Fall through.  */
4289 
4290     case INTEGER_CST:
4291     case REAL_CST:
4292     case FIXED_CST:
4293       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4294       return op0;
4295 
4296     case POLY_INT_CST:
4297       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4298 
4299     case COMPLEX_CST:
4300       gcc_assert (COMPLEX_MODE_P (mode));
4301       op0 = expand_debug_expr (TREE_REALPART (exp));
4302       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4303       return gen_rtx_CONCAT (mode, op0, op1);
4304 
4305     case DEBUG_EXPR_DECL:
4306       op0 = DECL_RTL_IF_SET (exp);
4307 
4308       if (op0)
4309 	return op0;
4310 
4311       op0 = gen_rtx_DEBUG_EXPR (mode);
4312       DEBUG_EXPR_TREE_DECL (op0) = exp;
4313       SET_DECL_RTL (exp, op0);
4314 
4315       return op0;
4316 
4317     case VAR_DECL:
4318     case PARM_DECL:
4319     case FUNCTION_DECL:
4320     case LABEL_DECL:
4321     case CONST_DECL:
4322     case RESULT_DECL:
4323       op0 = DECL_RTL_IF_SET (exp);
4324 
4325       /* This decl was probably optimized away.  */
4326       if (!op0)
4327 	{
4328 	  if (!VAR_P (exp)
4329 	      || DECL_EXTERNAL (exp)
4330 	      || !TREE_STATIC (exp)
4331 	      || !DECL_NAME (exp)
4332 	      || DECL_HARD_REGISTER (exp)
4333 	      || DECL_IN_CONSTANT_POOL (exp)
4334 	      || mode == VOIDmode)
4335 	    return NULL;
4336 
4337 	  op0 = make_decl_rtl_for_debug (exp);
4338 	  if (!MEM_P (op0)
4339 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4340 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4341 	    return NULL;
4342 	}
4343       else
4344 	op0 = copy_rtx (op0);
4345 
4346       if (GET_MODE (op0) == BLKmode
4347 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4348 	     below would ICE.  While it is likely a FE bug,
4349 	     try to be robust here.  See PR43166.  */
4350 	  || mode == BLKmode
4351 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4352 	{
4353 	  gcc_assert (MEM_P (op0));
4354 	  op0 = adjust_address_nv (op0, mode, 0);
4355 	  return op0;
4356 	}
4357 
4358       /* Fall through.  */
4359 
4360     adjust_mode:
4361     case PAREN_EXPR:
4362     CASE_CONVERT:
4363       {
4364 	inner_mode = GET_MODE (op0);
4365 
4366 	if (mode == inner_mode)
4367 	  return op0;
4368 
4369 	if (inner_mode == VOIDmode)
4370 	  {
4371 	    if (TREE_CODE (exp) == SSA_NAME)
4372 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4373 	    else
4374 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4375 	    if (mode == inner_mode)
4376 	      return op0;
4377 	  }
4378 
4379 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4380 	  {
4381 	    if (GET_MODE_UNIT_BITSIZE (mode)
4382 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4383 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4384 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4385 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4386 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4387 	    else
4388 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4389 	  }
4390 	else if (FLOAT_MODE_P (mode))
4391 	  {
4392 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4393 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4394 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4395 	    else
4396 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4397 	  }
4398 	else if (FLOAT_MODE_P (inner_mode))
4399 	  {
4400 	    if (unsignedp)
4401 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4402 	    else
4403 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4404 	  }
4405 	else if (GET_MODE_UNIT_PRECISION (mode)
4406 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4407 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4408 	else if (GET_MODE_UNIT_PRECISION (mode)
4409 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4410 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4411 	else if (UNARY_CLASS_P (exp)
4412 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4413 		 : unsignedp)
4414 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4415 	else
4416 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4417 
4418 	return op0;
4419       }
4420 
4421     case MEM_REF:
4422       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4423 	{
4424 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4425 				     TREE_OPERAND (exp, 0),
4426 				     TREE_OPERAND (exp, 1));
4427 	  if (newexp)
4428 	    return expand_debug_expr (newexp);
4429 	}
4430       /* FALLTHROUGH */
4431     case INDIRECT_REF:
4432       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4433       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4434       if (!op0)
4435 	return NULL;
4436 
4437       if (TREE_CODE (exp) == MEM_REF)
4438 	{
4439 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4440 	      || (GET_CODE (op0) == PLUS
4441 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4442 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4443 	       Instead just use get_inner_reference.  */
4444 	    goto component_ref;
4445 
4446 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4447 	  if (!op1 || !CONST_INT_P (op1))
4448 	    return NULL;
4449 
4450 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4451 	}
4452 
4453       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4454 
4455       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4456 					  op0, as);
4457       if (op0 == NULL_RTX)
4458 	return NULL;
4459 
4460       op0 = gen_rtx_MEM (mode, op0);
4461       set_mem_attributes (op0, exp, 0);
4462       if (TREE_CODE (exp) == MEM_REF
4463 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4464 	set_mem_expr (op0, NULL_TREE);
4465       set_mem_addr_space (op0, as);
4466 
4467       return op0;
4468 
4469     case TARGET_MEM_REF:
4470       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4471 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4472 	return NULL;
4473 
4474       op0 = expand_debug_expr
4475 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4476       if (!op0)
4477 	return NULL;
4478 
4479       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4480       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4481 					  op0, as);
4482       if (op0 == NULL_RTX)
4483 	return NULL;
4484 
4485       op0 = gen_rtx_MEM (mode, op0);
4486 
4487       set_mem_attributes (op0, exp, 0);
4488       set_mem_addr_space (op0, as);
4489 
4490       return op0;
4491 
4492     component_ref:
4493     case ARRAY_REF:
4494     case ARRAY_RANGE_REF:
4495     case COMPONENT_REF:
4496     case BIT_FIELD_REF:
4497     case REALPART_EXPR:
4498     case IMAGPART_EXPR:
4499     case VIEW_CONVERT_EXPR:
4500       {
4501 	machine_mode mode1;
4502 	poly_int64 bitsize, bitpos;
4503 	tree offset;
4504 	int reversep, volatilep = 0;
4505 	tree tem
4506 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4507 				 &unsignedp, &reversep, &volatilep);
4508 	rtx orig_op0;
4509 
4510 	if (known_eq (bitsize, 0))
4511 	  return NULL;
4512 
4513 	orig_op0 = op0 = expand_debug_expr (tem);
4514 
4515 	if (!op0)
4516 	  return NULL;
4517 
4518 	if (offset)
4519 	  {
4520 	    machine_mode addrmode, offmode;
4521 
4522 	    if (!MEM_P (op0))
4523 	      return NULL;
4524 
4525 	    op0 = XEXP (op0, 0);
4526 	    addrmode = GET_MODE (op0);
4527 	    if (addrmode == VOIDmode)
4528 	      addrmode = Pmode;
4529 
4530 	    op1 = expand_debug_expr (offset);
4531 	    if (!op1)
4532 	      return NULL;
4533 
4534 	    offmode = GET_MODE (op1);
4535 	    if (offmode == VOIDmode)
4536 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4537 
4538 	    if (addrmode != offmode)
4539 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4540 
4541 	    /* Don't use offset_address here, we don't need a
4542 	       recognizable address, and we don't want to generate
4543 	       code.  */
4544 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4545 							  op0, op1));
4546 	  }
4547 
4548 	if (MEM_P (op0))
4549 	  {
4550 	    if (mode1 == VOIDmode)
4551 	      {
4552 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4553 		  return NULL;
4554 		/* Bitfield.  */
4555 		mode1 = smallest_int_mode_for_size (bitsize);
4556 	      }
4557 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4558 	    if (maybe_ne (bytepos, 0))
4559 	      {
4560 		op0 = adjust_address_nv (op0, mode1, bytepos);
4561 		bitpos = num_trailing_bits (bitpos);
4562 	      }
4563 	    else if (known_eq (bitpos, 0)
4564 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4565 	      op0 = adjust_address_nv (op0, mode, 0);
4566 	    else if (GET_MODE (op0) != mode1)
4567 	      op0 = adjust_address_nv (op0, mode1, 0);
4568 	    else
4569 	      op0 = copy_rtx (op0);
4570 	    if (op0 == orig_op0)
4571 	      op0 = shallow_copy_rtx (op0);
4572 	    set_mem_attributes (op0, exp, 0);
4573 	  }
4574 
4575 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4576 	  return op0;
4577 
4578 	if (maybe_lt (bitpos, 0))
4579           return NULL;
4580 
4581 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4582 	  return NULL;
4583 
4584 	poly_int64 bytepos;
4585 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4586 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4587 	  {
4588 	    machine_mode opmode = GET_MODE (op0);
4589 
4590 	    if (opmode == VOIDmode)
4591 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4592 
4593 	    /* This condition may hold if we're expanding the address
4594 	       right past the end of an array that turned out not to
4595 	       be addressable (i.e., the address was only computed in
4596 	       debug stmts).  The gen_subreg below would rightfully
4597 	       crash, and the address doesn't really exist, so just
4598 	       drop it.  */
4599 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4600 	      return NULL;
4601 
4602 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4603 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4604 	  }
4605 
4606 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4607 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4608 				     ? SIGN_EXTRACT
4609 				     : ZERO_EXTRACT, mode,
4610 				     GET_MODE (op0) != VOIDmode
4611 				     ? GET_MODE (op0)
4612 				     : TYPE_MODE (TREE_TYPE (tem)),
4613 				     op0, gen_int_mode (bitsize, word_mode),
4614 				     gen_int_mode (bitpos, word_mode));
4615       }
4616 
4617     case ABS_EXPR:
4618       return simplify_gen_unary (ABS, mode, op0, mode);
4619 
4620     case NEGATE_EXPR:
4621       return simplify_gen_unary (NEG, mode, op0, mode);
4622 
4623     case BIT_NOT_EXPR:
4624       return simplify_gen_unary (NOT, mode, op0, mode);
4625 
4626     case FLOAT_EXPR:
4627       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4628 									 0)))
4629 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4630 				 inner_mode);
4631 
4632     case FIX_TRUNC_EXPR:
4633       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4634 				 inner_mode);
4635 
4636     case POINTER_PLUS_EXPR:
4637       /* For the rare target where pointers are not the same size as
4638 	 size_t, we need to check for mis-matched modes and correct
4639 	 the addend.  */
4640       if (op0 && op1
4641 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4642 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4643 	  && op0_mode != op1_mode)
4644 	{
4645 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4646 	      /* If OP0 is a partial mode, then we must truncate, even
4647 		 if it has the same bitsize as OP1, because GCC's
4648 		 representation of partial modes is opaque.  */
4649 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4650 		  && (GET_MODE_BITSIZE (op0_mode)
4651 		      == GET_MODE_BITSIZE (op1_mode))))
4652 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4653 	  else
4654 	    /* We always sign-extend, regardless of the signedness of
4655 	       the operand, because the operand is always unsigned
4656 	       here even if the original C expression is signed.  */
4657 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4658 	}
4659       /* Fall through.  */
4660     case PLUS_EXPR:
4661       return simplify_gen_binary (PLUS, mode, op0, op1);
4662 
4663     case MINUS_EXPR:
4664     case POINTER_DIFF_EXPR:
4665       return simplify_gen_binary (MINUS, mode, op0, op1);
4666 
4667     case MULT_EXPR:
4668       return simplify_gen_binary (MULT, mode, op0, op1);
4669 
4670     case RDIV_EXPR:
4671     case TRUNC_DIV_EXPR:
4672     case EXACT_DIV_EXPR:
4673       if (unsignedp)
4674 	return simplify_gen_binary (UDIV, mode, op0, op1);
4675       else
4676 	return simplify_gen_binary (DIV, mode, op0, op1);
4677 
4678     case TRUNC_MOD_EXPR:
4679       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4680 
4681     case FLOOR_DIV_EXPR:
4682       if (unsignedp)
4683 	return simplify_gen_binary (UDIV, mode, op0, op1);
4684       else
4685 	{
4686 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4687 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4688 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4689 	  return simplify_gen_binary (PLUS, mode, div, adj);
4690 	}
4691 
4692     case FLOOR_MOD_EXPR:
4693       if (unsignedp)
4694 	return simplify_gen_binary (UMOD, mode, op0, op1);
4695       else
4696 	{
4697 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4698 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4699 	  adj = simplify_gen_unary (NEG, mode,
4700 				    simplify_gen_binary (MULT, mode, adj, op1),
4701 				    mode);
4702 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4703 	}
4704 
4705     case CEIL_DIV_EXPR:
4706       if (unsignedp)
4707 	{
4708 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4709 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4710 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4711 	  return simplify_gen_binary (PLUS, mode, div, adj);
4712 	}
4713       else
4714 	{
4715 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4716 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4717 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4718 	  return simplify_gen_binary (PLUS, mode, div, adj);
4719 	}
4720 
4721     case CEIL_MOD_EXPR:
4722       if (unsignedp)
4723 	{
4724 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4725 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4726 	  adj = simplify_gen_unary (NEG, mode,
4727 				    simplify_gen_binary (MULT, mode, adj, op1),
4728 				    mode);
4729 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4730 	}
4731       else
4732 	{
4733 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4734 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4735 	  adj = simplify_gen_unary (NEG, mode,
4736 				    simplify_gen_binary (MULT, mode, adj, op1),
4737 				    mode);
4738 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4739 	}
4740 
4741     case ROUND_DIV_EXPR:
4742       if (unsignedp)
4743 	{
4744 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4745 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4746 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4747 	  return simplify_gen_binary (PLUS, mode, div, adj);
4748 	}
4749       else
4750 	{
4751 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4752 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4753 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4754 	  return simplify_gen_binary (PLUS, mode, div, adj);
4755 	}
4756 
4757     case ROUND_MOD_EXPR:
4758       if (unsignedp)
4759 	{
4760 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4761 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4762 	  adj = simplify_gen_unary (NEG, mode,
4763 				    simplify_gen_binary (MULT, mode, adj, op1),
4764 				    mode);
4765 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4766 	}
4767       else
4768 	{
4769 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4770 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4771 	  adj = simplify_gen_unary (NEG, mode,
4772 				    simplify_gen_binary (MULT, mode, adj, op1),
4773 				    mode);
4774 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4775 	}
4776 
4777     case LSHIFT_EXPR:
4778       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4779 
4780     case RSHIFT_EXPR:
4781       if (unsignedp)
4782 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4783       else
4784 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4785 
4786     case LROTATE_EXPR:
4787       return simplify_gen_binary (ROTATE, mode, op0, op1);
4788 
4789     case RROTATE_EXPR:
4790       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4791 
4792     case MIN_EXPR:
4793       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4794 
4795     case MAX_EXPR:
4796       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4797 
4798     case BIT_AND_EXPR:
4799     case TRUTH_AND_EXPR:
4800       return simplify_gen_binary (AND, mode, op0, op1);
4801 
4802     case BIT_IOR_EXPR:
4803     case TRUTH_OR_EXPR:
4804       return simplify_gen_binary (IOR, mode, op0, op1);
4805 
4806     case BIT_XOR_EXPR:
4807     case TRUTH_XOR_EXPR:
4808       return simplify_gen_binary (XOR, mode, op0, op1);
4809 
4810     case TRUTH_ANDIF_EXPR:
4811       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4812 
4813     case TRUTH_ORIF_EXPR:
4814       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4815 
4816     case TRUTH_NOT_EXPR:
4817       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4818 
4819     case LT_EXPR:
4820       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4821 				      op0, op1);
4822 
4823     case LE_EXPR:
4824       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4825 				      op0, op1);
4826 
4827     case GT_EXPR:
4828       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4829 				      op0, op1);
4830 
4831     case GE_EXPR:
4832       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4833 				      op0, op1);
4834 
4835     case EQ_EXPR:
4836       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4837 
4838     case NE_EXPR:
4839       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4840 
4841     case UNORDERED_EXPR:
4842       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4843 
4844     case ORDERED_EXPR:
4845       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4846 
4847     case UNLT_EXPR:
4848       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4849 
4850     case UNLE_EXPR:
4851       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4852 
4853     case UNGT_EXPR:
4854       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4855 
4856     case UNGE_EXPR:
4857       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4858 
4859     case UNEQ_EXPR:
4860       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4861 
4862     case LTGT_EXPR:
4863       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4864 
4865     case COND_EXPR:
4866       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4867 
4868     case COMPLEX_EXPR:
4869       gcc_assert (COMPLEX_MODE_P (mode));
4870       if (GET_MODE (op0) == VOIDmode)
4871 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4872       if (GET_MODE (op1) == VOIDmode)
4873 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4874       return gen_rtx_CONCAT (mode, op0, op1);
4875 
4876     case CONJ_EXPR:
4877       if (GET_CODE (op0) == CONCAT)
4878 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4879 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4880 						   XEXP (op0, 1),
4881 						   GET_MODE_INNER (mode)));
4882       else
4883 	{
4884 	  scalar_mode imode = GET_MODE_INNER (mode);
4885 	  rtx re, im;
4886 
4887 	  if (MEM_P (op0))
4888 	    {
4889 	      re = adjust_address_nv (op0, imode, 0);
4890 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4891 	    }
4892 	  else
4893 	    {
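	      /* OP0 is neither a CONCAT nor a MEM: view the complex value
		 as one wide integer and pick out the real and imaginary
		 halves with ZERO_EXTRACTs.  */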
4894 	      scalar_int_mode ifmode;
4895 	      scalar_int_mode ihmode;
4896 	      rtx halfsize;
4897 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4898 		  || !int_mode_for_mode (imode).exists (&ihmode))
4899 		return NULL;
4900 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4901 	      re = op0;
4902 	      if (mode != ifmode)
4903 		re = gen_rtx_SUBREG (ifmode, re, 0);
4904 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4905 	      if (imode != ihmode)
4906 		re = gen_rtx_SUBREG (imode, re, 0);
4907 	      im = copy_rtx (op0);
4908 	      if (mode != ifmode)
4909 		im = gen_rtx_SUBREG (ifmode, im, 0);
4910 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4911 	      if (imode != ihmode)
4912 		im = gen_rtx_SUBREG (imode, im, 0);
4913 	    }
4914 	  im = gen_rtx_NEG (imode, im);
4915 	  return gen_rtx_CONCAT (mode, re, im);
4916 	}
4917 
4918     case ADDR_EXPR:
4919       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4920       if (!op0 || !MEM_P (op0))
4921 	{
4922 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4923 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4924 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4925 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4926 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4927 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4928 
4929 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4930 	    {
4931 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4932 	      bool reverse;
4933 	      tree decl
4934 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4935 					   &bitsize, &maxsize, &reverse);
4936 	      if ((VAR_P (decl)
4937 		   || TREE_CODE (decl) == PARM_DECL
4938 		   || TREE_CODE (decl) == RESULT_DECL)
4939 		  && (!TREE_ADDRESSABLE (decl)
4940 		      || target_for_debug_bind (decl))
4941 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4942 		  && known_gt (bitsize, 0)
4943 		  && known_eq (bitsize, maxsize))
4944 		{
4945 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4946 		  return plus_constant (mode, base, byteoffset);
4947 		}
4948 	    }
4949 
4950 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4951 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4952 		 == ADDR_EXPR)
4953 	    {
4954 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4955 						     0));
4956 	      if (op0 != NULL
4957 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4958 		      || (GET_CODE (op0) == PLUS
4959 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4960 			  && CONST_INT_P (XEXP (op0, 1)))))
4961 		{
4962 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4963 							 1));
4964 		  if (!op1 || !CONST_INT_P (op1))
4965 		    return NULL;
4966 
4967 		  return plus_constant (mode, op0, INTVAL (op1));
4968 		}
4969 	    }
4970 
4971 	  return NULL;
4972 	}
4973 
4974       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4975       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4976       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4977 
4978       return op0;
4979 
4980     case VECTOR_CST:
4981       {
4982 	unsigned HOST_WIDE_INT i, nelts;
4983 
4984 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4985 	  return NULL;
4986 
4987 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4988 
4989 	for (i = 0; i < nelts; ++i)
4990 	  {
4991 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4992 	    if (!op1)
4993 	      return NULL;
4994 	    XVECEXP (op0, 0, i) = op1;
4995 	  }
4996 
4997 	return op0;
4998       }
4999 
5000     case CONSTRUCTOR:
5001       if (TREE_CLOBBER_P (exp))
5002 	return NULL;
5003       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5004 	{
5005 	  unsigned i;
5006 	  unsigned HOST_WIDE_INT nelts;
5007 	  tree val;
5008 
5009 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5010 	    goto flag_unsupported;
5011 
5012 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5013 
5014 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5015 	    {
5016 	      op1 = expand_debug_expr (val);
5017 	      if (!op1)
5018 		return NULL;
5019 	      XVECEXP (op0, 0, i) = op1;
5020 	    }
5021 
5022 	  if (i < nelts)
5023 	    {
5024 	      op1 = expand_debug_expr
5025 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5026 
5027 	      if (!op1)
5028 		return NULL;
5029 
5030 	      for (; i < nelts; i++)
5031 		XVECEXP (op0, 0, i) = op1;
5032 	    }
5033 
5034 	  return op0;
5035 	}
5036       else
5037 	goto flag_unsupported;
5038 
5039     case CALL_EXPR:
5040       /* ??? Maybe handle some builtins?  */
5041       return NULL;
5042 
5043     case SSA_NAME:
5044       {
5045 	gimple *g = get_gimple_for_ssa_name (exp);
5046 	if (g)
5047 	  {
5048 	    tree t = NULL_TREE;
5049 	    if (deep_ter_debug_map)
5050 	      {
5051 		tree *slot = deep_ter_debug_map->get (exp);
5052 		if (slot)
5053 		  t = *slot;
5054 	      }
5055 	    if (t == NULL_TREE)
5056 	      t = gimple_assign_rhs_to_tree (g);
5057 	    op0 = expand_debug_expr (t);
5058 	    if (!op0)
5059 	      return NULL;
5060 	  }
5061 	else
5062 	  {
5063 	    /* If this is a reference to an incoming value of a
5064 	       parameter that is never used in the code, or where the
5065 	       incoming value is never used in the code, use the
5066 	       PARM_DECL's DECL_RTL if set.  */
5067 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5068 		&& SSA_NAME_VAR (exp)
5069 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5070 		&& has_zero_uses (exp))
5071 	      {
5072 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5073 		if (op0)
5074 		  goto adjust_mode;
5075 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5076 		if (op0)
5077 		  goto adjust_mode;
5078 	      }
5079 
5080 	    int part = var_to_partition (SA.map, exp);
5081 
5082 	    if (part == NO_PARTITION)
5083 	      return NULL;
5084 
5085 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5086 
5087 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5088 	  }
5089 	goto adjust_mode;
5090       }
5091 
5092     case ERROR_MARK:
5093       return NULL;
5094 
5095     /* Vector stuff.  For most of these tree codes we don't have RTL codes.  */
5096     case REALIGN_LOAD_EXPR:
5097     case VEC_COND_EXPR:
5098     case VEC_PACK_FIX_TRUNC_EXPR:
5099     case VEC_PACK_SAT_EXPR:
5100     case VEC_PACK_TRUNC_EXPR:
5101     case VEC_UNPACK_FLOAT_HI_EXPR:
5102     case VEC_UNPACK_FLOAT_LO_EXPR:
5103     case VEC_UNPACK_HI_EXPR:
5104     case VEC_UNPACK_LO_EXPR:
5105     case VEC_WIDEN_MULT_HI_EXPR:
5106     case VEC_WIDEN_MULT_LO_EXPR:
5107     case VEC_WIDEN_MULT_EVEN_EXPR:
5108     case VEC_WIDEN_MULT_ODD_EXPR:
5109     case VEC_WIDEN_LSHIFT_HI_EXPR:
5110     case VEC_WIDEN_LSHIFT_LO_EXPR:
5111     case VEC_PERM_EXPR:
5112     case VEC_DUPLICATE_EXPR:
5113     case VEC_SERIES_EXPR:
5114       return NULL;
5115 
5116     /* Misc codes.  */
5117     case ADDR_SPACE_CONVERT_EXPR:
5118     case FIXED_CONVERT_EXPR:
5119     case OBJ_TYPE_REF:
5120     case WITH_SIZE_EXPR:
5121     case BIT_INSERT_EXPR:
5122       return NULL;
5123 
5124     case DOT_PROD_EXPR:
5125       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5126 	  && SCALAR_INT_MODE_P (mode))
5127 	{
5128 	  op0
5129 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5130 									  0)))
5131 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5132 				  inner_mode);
5133 	  op1
5134 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5135 									  1)))
5136 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5137 				  inner_mode);
5138 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5139 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5140 	}
5141       return NULL;
5142 
5143     case WIDEN_MULT_EXPR:
5144     case WIDEN_MULT_PLUS_EXPR:
5145     case WIDEN_MULT_MINUS_EXPR:
5146       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5147 	  && SCALAR_INT_MODE_P (mode))
5148 	{
5149 	  inner_mode = GET_MODE (op0);
5150 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5151 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5152 	  else
5153 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5154 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5155 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5156 	  else
5157 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5158 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5159 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5160 	    return op0;
5161 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5162 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5163 	  else
5164 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5165 	}
5166       return NULL;
5167 
5168     case MULT_HIGHPART_EXPR:
5169       /* ??? Similar to the above.  */
5170       return NULL;
5171 
5172     case WIDEN_SUM_EXPR:
5173     case WIDEN_LSHIFT_EXPR:
5174       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5175 	  && SCALAR_INT_MODE_P (mode))
5176 	{
5177 	  op0
5178 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5179 									  0)))
5180 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5181 				  inner_mode);
5182 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5183 				      ? ASHIFT : PLUS, mode, op0, op1);
5184 	}
5185       return NULL;
5186 
5187     case FMA_EXPR:
5188       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5189 
5190     default:
5191     flag_unsupported:
5192       if (flag_checking)
5193 	{
5194 	  debug_tree (exp);
5195 	  gcc_unreachable ();
5196 	}
5197       return NULL;
5198     }
5199 }
5200 
5201 /* Return an RTX equivalent to the source bind value of the tree expression
5202    EXP.  */
5203 
5204 static rtx
5205 expand_debug_source_expr (tree exp)
5206 {
5207   rtx op0 = NULL_RTX;
5208   machine_mode mode = VOIDmode, inner_mode;
5209 
5210   switch (TREE_CODE (exp))
5211     {
5212     case PARM_DECL:
5213       {
5214 	mode = DECL_MODE (exp);
5215 	op0 = expand_debug_parm_decl (exp);
5216 	if (op0)
5217 	   break;
5218 	/* See if this isn't an argument that has been completely
5219 	   optimized out.  */
5220 	if (!DECL_RTL_SET_P (exp)
5221 	    && !DECL_INCOMING_RTL (exp)
5222 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5223 	  {
5224 	    tree aexp = DECL_ORIGIN (exp);
5225 	    if (DECL_CONTEXT (aexp)
5226 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5227 	      {
5228 		vec<tree, va_gc> **debug_args;
5229 		unsigned int ix;
5230 		tree ddecl;
5231 		debug_args = decl_debug_args_lookup (current_function_decl);
5232 		if (debug_args != NULL)
5233 		  {
5234 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5235 			 ix += 2)
5236 		      if (ddecl == aexp)
5237 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5238 		  }
5239 	      }
5240 	  }
5241 	break;
5242       }
5243     default:
5244       break;
5245     }
5246 
5247   if (op0 == NULL_RTX)
5248     return NULL_RTX;
5249 
5250   inner_mode = GET_MODE (op0);
5251   if (mode == inner_mode)
5252     return op0;
5253 
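  /* Otherwise convert OP0 from its own mode to the mode of the decl,
     using float conversions, fix/float, subregs, truncation or extension
     as appropriate, mirroring the adjust_mode handling in
     expand_debug_expr.  */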
5254   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5255     {
5256       if (GET_MODE_UNIT_BITSIZE (mode)
5257 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5258 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5259       else if (GET_MODE_UNIT_BITSIZE (mode)
5260 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5261 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5262       else
5263 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5264     }
5265   else if (FLOAT_MODE_P (mode))
5266     gcc_unreachable ();
5267   else if (FLOAT_MODE_P (inner_mode))
5268     {
5269       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5270 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5271       else
5272 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5273     }
5274   else if (GET_MODE_UNIT_PRECISION (mode)
5275 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5276     op0 = lowpart_subreg (mode, op0, inner_mode);
5277   else if (GET_MODE_UNIT_PRECISION (mode)
5278 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5279     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5280   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5281     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5282   else
5283     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5284 
5285   return op0;
5286 }
5287 
5288 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5289    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5290    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
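/* For example, a bind location whose RTL is nested more than four levels
   deep, such as
     (plus (mult (plus (mult (mem A) (mem B)) ...) ...) ...)
   gets the subexpressions at the deepest level split out into DEBUG_EXPRs
   bound by their own DEBUG_INSNs, and the original location then refers
   to those DEBUG_EXPRs instead.  (Illustrative RTL only.)  */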
5291 
5292 static void
5293 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5294 {
5295   rtx exp = *exp_p;
5296 
5297   if (exp == NULL_RTX)
5298     return;
5299 
5300   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5301     return;
5302 
5303   if (depth == 4)
5304     {
5305       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5306       rtx dval = make_debug_expr_from_rtl (exp);
5307 
5308       /* Emit a debug bind insn before INSN.  */
5309       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5310 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5311 				       VAR_INIT_STATUS_INITIALIZED);
5312 
5313       emit_debug_insn_before (bind, insn);
5314       *exp_p = dval;
5315       return;
5316     }
5317 
5318   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5319   int i, j;
5320   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5321     switch (*format_ptr++)
5322       {
5323       case 'e':
5324 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5325 	break;
5326 
5327       case 'E':
5328       case 'V':
5329 	for (j = 0; j < XVECLEN (exp, i); j++)
5330 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5331 	break;
5332 
5333       default:
5334 	break;
5335       }
5336 }
5337 
5338 /* Expand the _LOCs in debug insns.  We run this after expanding all
5339    regular insns, so that any variables referenced in the function
5340    will have their DECL_RTLs set.  */
5341 
5342 static void
5343 expand_debug_locations (void)
5344 {
5345   rtx_insn *insn;
5346   rtx_insn *last = get_last_insn ();
5347   int save_strict_alias = flag_strict_aliasing;
5348 
5349   /* New alias sets while setting up memory attributes cause
5350      -fcompare-debug failures, even though they don't bring about any
5351      codegen changes.  */
5352   flag_strict_aliasing = 0;
5353 
5354   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5355     if (DEBUG_BIND_INSN_P (insn))
5356       {
5357 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5358 	rtx val;
5359 	rtx_insn *prev_insn, *insn2;
5360 	machine_mode mode;
5361 
5362 	if (value == NULL_TREE)
5363 	  val = NULL_RTX;
5364 	else
5365 	  {
5366 	    if (INSN_VAR_LOCATION_STATUS (insn)
5367 		== VAR_INIT_STATUS_UNINITIALIZED)
5368 	      val = expand_debug_source_expr (value);
5369 	    /* The avoid_deep_ter_for_debug function inserts
5370 	       debug bind stmts after SSA_NAME definition, with the
5371 	       SSA_NAME as the whole bind location.  Temporarily disable
5372 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5373 	       being defined in this DEBUG_INSN.  */
5374 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5375 	      {
5376 		tree *slot = deep_ter_debug_map->get (value);
5377 		if (slot)
5378 		  {
5379 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5380 		      *slot = NULL_TREE;
5381 		    else
5382 		      slot = NULL;
5383 		  }
5384 		val = expand_debug_expr (value);
5385 		if (slot)
5386 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5387 	      }
5388 	    else
5389 	      val = expand_debug_expr (value);
5390 	    gcc_assert (last == get_last_insn ());
5391 	  }
5392 
5393 	if (!val)
5394 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5395 	else
5396 	  {
5397 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5398 
5399 	    gcc_assert (mode == GET_MODE (val)
5400 			|| (GET_MODE (val) == VOIDmode
5401 			    && (CONST_SCALAR_INT_P (val)
5402 				|| GET_CODE (val) == CONST_FIXED
5403 				|| GET_CODE (val) == LABEL_REF)));
5404 	  }
5405 
5406 	INSN_VAR_LOCATION_LOC (insn) = val;
5407 	prev_insn = PREV_INSN (insn);
5408 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5409 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5410       }
5411 
5412   flag_strict_aliasing = save_strict_alias;
5413 }
5414 
5415 /* Swap the operands of commutative operations so that the more
5416    expensive operand is expanded first.  */
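/* For example, in
     x_3 = a_1 + b_2;
   where the (TERed) definition of b_2 is estimated to be costlier than
   that of a_1, the operands are swapped so that b_2 becomes the first
   operand and its subexpression is expanded first.  */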
5417 
5418 static void
5419 reorder_operands (basic_block bb)
5420 {
5421   unsigned int *lattice;  /* Hold cost of each statement.  */
5422   unsigned int i = 0, n = 0;
5423   gimple_stmt_iterator gsi;
5424   gimple_seq stmts;
5425   gimple *stmt;
5426   bool swap;
5427   tree op0, op1;
5428   ssa_op_iter iter;
5429   use_operand_p use_p;
5430   gimple *def0, *def1;
5431 
5432   /* Compute cost of each statement using estimate_num_insns.  */
5433   stmts = bb_seq (bb);
5434   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5435     {
5436       stmt = gsi_stmt (gsi);
5437       if (!is_gimple_debug (stmt))
5438         gimple_set_uid (stmt, n++);
5439     }
5440   lattice = XNEWVEC (unsigned int, n);
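  /* lattice[I] accumulates the cost of statement I plus the costs of the
     single-use (TERed) definitions feeding its operands, i.e. an estimate
     of the cost of the whole expression tree expanded for statement I.  */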
5441   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5442     {
5443       unsigned cost;
5444       stmt = gsi_stmt (gsi);
5445       if (is_gimple_debug (stmt))
5446 	continue;
5447       cost = estimate_num_insns (stmt, &eni_size_weights);
5448       lattice[i] = cost;
5449       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5450 	{
5451 	  tree use = USE_FROM_PTR (use_p);
5452 	  gimple *def_stmt;
5453 	  if (TREE_CODE (use) != SSA_NAME)
5454 	    continue;
5455 	  def_stmt = get_gimple_for_ssa_name (use);
5456 	  if (!def_stmt)
5457 	    continue;
5458 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5459 	}
5460       i++;
5461       if (!is_gimple_assign (stmt)
5462 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5463 	continue;
5464       op0 = gimple_op (stmt, 1);
5465       op1 = gimple_op (stmt, 2);
5466       if (TREE_CODE (op0) != SSA_NAME
5467 	  || TREE_CODE (op1) != SSA_NAME)
5468 	continue;
5469       /* Swap operands if the second one is more expensive.  */
5470       def0 = get_gimple_for_ssa_name (op0);
5471       def1 = get_gimple_for_ssa_name (op1);
5472       if (!def1)
5473 	continue;
5474       swap = false;
5475       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5476 	swap = true;
5477       if (swap)
5478 	{
5479 	  if (dump_file && (dump_flags & TDF_DETAILS))
5480 	    {
5481 	      fprintf (dump_file, "Swap operands in stmt:\n");
5482 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5483 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5484 		       def0 ? lattice[gimple_uid (def0)] : 0,
5485 		       lattice[gimple_uid (def1)]);
5486 	    }
5487 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5488 			     gimple_assign_rhs2_ptr (stmt));
5489 	}
5490     }
5491   XDELETE (lattice);
5492 }
5493 
5494 /* Expand basic block BB from GIMPLE trees to RTL.  */
5495 
5496 static basic_block
5497 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5498 {
5499   gimple_stmt_iterator gsi;
5500   gimple_seq stmts;
5501   gimple *stmt = NULL;
5502   rtx_note *note = NULL;
5503   rtx_insn *last;
5504   edge e;
5505   edge_iterator ei;
5506 
5507   if (dump_file)
5508     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5509 	     bb->index);
5510 
5511   /* Note that since we are now transitioning from GIMPLE to RTL, we
5512      cannot use the gsi_*_bb() routines because they expect the basic
5513      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5514      access the BB sequence directly.  */
5515   if (optimize)
5516     reorder_operands (bb);
5517   stmts = bb_seq (bb);
5518   bb->il.gimple.seq = NULL;
5519   bb->il.gimple.phi_nodes = NULL;
5520   rtl_profile_for_bb (bb);
5521   init_rtl_bb_info (bb);
5522   bb->flags |= BB_RTL;
5523 
5524   /* Remove the RETURN_EXPR if we may fall through to the exit
5525      instead.  */
5526   gsi = gsi_last (stmts);
5527   if (!gsi_end_p (gsi)
5528       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5529     {
5530       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5531 
5532       gcc_assert (single_succ_p (bb));
5533       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5534 
5535       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5536 	  && !gimple_return_retval (ret_stmt))
5537 	{
5538 	  gsi_remove (&gsi, false);
5539 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5540 	}
5541     }
5542 
5543   gsi = gsi_start (stmts);
5544   if (!gsi_end_p (gsi))
5545     {
5546       stmt = gsi_stmt (gsi);
5547       if (gimple_code (stmt) != GIMPLE_LABEL)
5548 	stmt = NULL;
5549     }
5550 
5551   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5552 
5553   if (stmt || elt)
5554     {
5555       gcc_checking_assert (!note);
5556       last = get_last_insn ();
5557 
5558       if (stmt)
5559 	{
5560 	  expand_gimple_stmt (stmt);
5561 	  gsi_next (&gsi);
5562 	}
5563 
5564       if (elt)
5565 	emit_label (*elt);
5566 
5567       BB_HEAD (bb) = NEXT_INSN (last);
5568       if (NOTE_P (BB_HEAD (bb)))
5569 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5570       gcc_assert (LABEL_P (BB_HEAD (bb)));
5571       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5572 
5573       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5574     }
5575   else
5576     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5577 
5578   if (note)
5579     NOTE_BASIC_BLOCK (note) = bb;
5580 
5581   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5582     {
5583       basic_block new_bb;
5584 
5585       stmt = gsi_stmt (gsi);
5586 
5587       /* If this statement is a non-debug one, and we generate debug
5588 	 insns, then this one might be the last real use of a TERed
5589 	 SSA_NAME, but where there are still some debug uses further
5590 	 down.  Expanding the current SSA name in such further debug
5591 	 uses by their RHS might lead to wrong debug info, as coalescing
5592 	 might make the operands of such RHS be placed into the same
5593 	 pseudo as something else.  Like so:
5594 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5595 	   use(a_1);
5596 	   a_2 = ...
5597            #DEBUG ... => a_1
5598 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5599 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5600 	 the write to a_2 would actually have clobbered the place which
5601 	 formerly held a_0.
5602 
5603 	 So, instead of that, we recognize the situation, and generate
5604 	 debug temporaries at the last real use of TERed SSA names:
5605 	   a_1 = a_0 + 1;
5606            #DEBUG #D1 => a_1
5607 	   use(a_1);
5608 	   a_2 = ...
5609            #DEBUG ... => #D1
5610 	 */
5611       if (MAY_HAVE_DEBUG_BIND_INSNS
5612 	  && SA.values
5613 	  && !is_gimple_debug (stmt))
5614 	{
5615 	  ssa_op_iter iter;
5616 	  tree op;
5617 	  gimple *def;
5618 
5619 	  location_t sloc = curr_insn_location ();
5620 
5621 	  /* Look for SSA names that have their last use here (TERed
5622 	     names always have only one real use).  */
5623 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5624 	    if ((def = get_gimple_for_ssa_name (op)))
5625 	      {
5626 		imm_use_iterator imm_iter;
5627 		use_operand_p use_p;
5628 		bool have_debug_uses = false;
5629 
5630 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5631 		  {
5632 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5633 		      {
5634 			have_debug_uses = true;
5635 			break;
5636 		      }
5637 		  }
5638 
5639 		if (have_debug_uses)
5640 		  {
5641 		    /* OP is a TERed SSA name, with DEF its defining
5642 		       statement, and where OP is used in further debug
5643 		       instructions.  Generate a debug temporary, and
5644 		       replace all uses of OP in debug insns with that
5645 		       temporary.  */
5646 		    gimple *debugstmt;
5647 		    tree value = gimple_assign_rhs_to_tree (def);
5648 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5649 		    rtx val;
5650 		    machine_mode mode;
5651 
5652 		    set_curr_insn_location (gimple_location (def));
5653 
5654 		    DECL_ARTIFICIAL (vexpr) = 1;
5655 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5656 		    if (DECL_P (value))
5657 		      mode = DECL_MODE (value);
5658 		    else
5659 		      mode = TYPE_MODE (TREE_TYPE (value));
5660 		    SET_DECL_MODE (vexpr, mode);
5661 
5662 		    val = gen_rtx_VAR_LOCATION
5663 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5664 
5665 		    emit_debug_insn (val);
5666 
5667 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5668 		      {
5669 			if (!gimple_debug_bind_p (debugstmt))
5670 			  continue;
5671 
5672 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5673 			  SET_USE (use_p, vexpr);
5674 
5675 			update_stmt (debugstmt);
5676 		      }
5677 		  }
5678 	      }
5679 	  set_curr_insn_location (sloc);
5680 	}
5681 
5682       currently_expanding_gimple_stmt = stmt;
5683 
5684       /* Expand this statement, then evaluate the resulting RTL and
5685 	 fixup the CFG accordingly.  */
5686       if (gimple_code (stmt) == GIMPLE_COND)
5687 	{
5688 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5689 	  if (new_bb)
5690 	    return new_bb;
5691 	}
5692       else if (is_gimple_debug (stmt))
5693 	{
5694 	  location_t sloc = curr_insn_location ();
5695 	  gimple_stmt_iterator nsi = gsi;
5696 
5697 	  for (;;)
5698 	    {
5699 	      tree var;
5700 	      tree value = NULL_TREE;
5701 	      rtx val = NULL_RTX;
5702 	      machine_mode mode;
5703 
5704 	      if (!gimple_debug_nonbind_marker_p (stmt))
5705 		{
5706 		  if (gimple_debug_bind_p (stmt))
5707 		    {
5708 		      var = gimple_debug_bind_get_var (stmt);
5709 
5710 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5711 			  && TREE_CODE (var) != LABEL_DECL
5712 			  && !target_for_debug_bind (var))
5713 			goto delink_debug_stmt;
5714 
5715 		      if (DECL_P (var))
5716 			mode = DECL_MODE (var);
5717 		      else
5718 			mode = TYPE_MODE (TREE_TYPE (var));
5719 
5720 		      if (gimple_debug_bind_has_value_p (stmt))
5721 			value = gimple_debug_bind_get_value (stmt);
5722 
5723 		      val = gen_rtx_VAR_LOCATION
5724 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5725 		    }
5726 		  else if (gimple_debug_source_bind_p (stmt))
5727 		    {
5728 		      var = gimple_debug_source_bind_get_var (stmt);
5729 
5730 		      value = gimple_debug_source_bind_get_value (stmt);
5731 
5732 		      mode = DECL_MODE (var);
5733 
5734 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5735 						  VAR_INIT_STATUS_UNINITIALIZED);
5736 		    }
5737 		  else
5738 		    gcc_unreachable ();
5739 		}
5740 	      /* If this function was first compiled with markers
5741 		 enabled, but they're now disabled (e.g. LTO), drop
5742 		 them on the floor.  */
5743 	      else if (gimple_debug_nonbind_marker_p (stmt)
5744 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5745 		goto delink_debug_stmt;
5746 	      else if (gimple_debug_begin_stmt_p (stmt))
5747 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5748 	      else if (gimple_debug_inline_entry_p (stmt))
5749 		{
5750 		  tree block = gimple_block (stmt);
5751 
5752 		  if (block)
5753 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5754 		  else
5755 		    goto delink_debug_stmt;
5756 		}
5757 	      else
5758 		gcc_unreachable ();
5759 
5760 	      last = get_last_insn ();
5761 
5762 	      set_curr_insn_location (gimple_location (stmt));
5763 
5764 	      emit_debug_insn (val);
5765 
5766 	      if (dump_file && (dump_flags & TDF_DETAILS))
5767 		{
5768 		  /* We can't dump the insn with a TREE where an RTX
5769 		     is expected.  */
5770 		  if (GET_CODE (val) == VAR_LOCATION)
5771 		    {
5772 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5773 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5774 		    }
5775 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5776 		  if (GET_CODE (val) == VAR_LOCATION)
5777 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5778 		}
5779 
5780 	    delink_debug_stmt:
5781 	      /* In order not to generate too many debug temporaries,
5782 	         we delink all uses of debug statements we already expanded.
5783 		 Therefore debug statements between definition and real
5784 		 use of TERed SSA names will continue to use the SSA name,
5785 		 and not be replaced with debug temps.  */
5786 	      delink_stmt_imm_use (stmt);
5787 
5788 	      gsi = nsi;
5789 	      gsi_next (&nsi);
5790 	      if (gsi_end_p (nsi))
5791 		break;
5792 	      stmt = gsi_stmt (nsi);
5793 	      if (!is_gimple_debug (stmt))
5794 		break;
5795 	    }
5796 
5797 	  set_curr_insn_location (sloc);
5798 	}
5799       else
5800 	{
5801 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5802 	  if (call_stmt
5803 	      && gimple_call_tail_p (call_stmt)
5804 	      && disable_tail_calls)
5805 	    gimple_call_set_tail (call_stmt, false);
5806 
5807 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5808 	    {
5809 	      bool can_fallthru;
5810 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5811 	      if (new_bb)
5812 		{
5813 		  if (can_fallthru)
5814 		    bb = new_bb;
5815 		  else
5816 		    return new_bb;
5817 		}
5818 	    }
5819 	  else
5820 	    {
5821 	      def_operand_p def_p;
5822 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5823 
5824 	      if (def_p != NULL)
5825 		{
5826 		  /* Ignore this stmt if it is in the list of
5827 		     replaceable expressions.  */
5828 		  if (SA.values
5829 		      && bitmap_bit_p (SA.values,
5830 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5831 		    continue;
5832 		}
5833 	      last = expand_gimple_stmt (stmt);
5834 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5835 	    }
5836 	}
5837     }
5838 
5839   currently_expanding_gimple_stmt = NULL;
5840 
5841   /* Expand implicit goto and convert goto_locus.  */
5842   FOR_EACH_EDGE (e, ei, bb->succs)
5843     {
5844       if (e->goto_locus != UNKNOWN_LOCATION)
5845 	set_curr_insn_location (e->goto_locus);
5846       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5847 	{
5848 	  emit_jump (label_rtx_for_bb (e->dest));
5849 	  e->flags &= ~EDGE_FALLTHRU;
5850 	}
5851     }
5852 
5853   /* Expanded RTL can create a jump in the last instruction of the block.
5854      Such a jump might be assumed to be a jump to the successor and break edge insertion.
5855      We need to insert a dummy move to prevent this.  PR41440.  */
5856   if (single_succ_p (bb)
5857       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5858       && (last = get_last_insn ())
5859       && (JUMP_P (last)
5860 	  || (DEBUG_INSN_P (last)
5861 	      && JUMP_P (prev_nondebug_insn (last)))))
5862     {
5863       rtx dummy = gen_reg_rtx (SImode);
5864       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5865     }
5866 
5867   do_pending_stack_adjust ();
5868 
5869   /* Find the block tail.  The last insn in the block is the insn
5870      before a barrier and/or table jump insn.  */
5871   last = get_last_insn ();
5872   if (BARRIER_P (last))
5873     last = PREV_INSN (last);
5874   if (JUMP_TABLE_DATA_P (last))
5875     last = PREV_INSN (PREV_INSN (last));
5876   BB_END (bb) = last;
5877 
5878   update_bb_for_insn (bb);
5879 
5880   return bb;
5881 }
5882 
5883 
5884 /* Create a basic block for initialization code.  */
5885 
5886 static basic_block
5887 construct_init_block (void)
5888 {
5889   basic_block init_block, first_block;
5890   edge e = NULL;
5891   int flags;
5892 
5893   /* Multiple entry points not supported yet.  */
5894   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5895   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5896   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5897   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5898   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5899 
5900   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5901 
5902   /* When the entry edge points to the first basic block, we don't need a
5903      jump; otherwise we have to jump to the proper target.  */
5904   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5905     {
5906       tree label = gimple_block_label (e->dest);
5907 
5908       emit_jump (jump_target_rtx (label));
5909       flags = 0;
5910     }
5911   else
5912     flags = EDGE_FALLTHRU;
5913 
5914   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5915 				   get_last_insn (),
5916 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5917   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5918   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5919   if (e)
5920     {
5921       first_block = e->dest;
5922       redirect_edge_succ (e, init_block);
5923       e = make_single_succ_edge (init_block, first_block, flags);
5924     }
5925   else
5926     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5927 			       EDGE_FALLTHRU);
5928 
5929   update_bb_for_insn (init_block);
5930   return init_block;
5931 }
5932 
5933 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5934    found in the block tree.  */
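/* E.g. the outermost block (DECL_INITIAL) gets level 0, a block nested
   directly inside it gets level 1, and so on; sibling blocks reached
   through BLOCK_CHAIN share the same level.  */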
5935 
5936 static void
5937 set_block_levels (tree block, int level)
5938 {
5939   while (block)
5940     {
5941       BLOCK_NUMBER (block) = level;
5942       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5943       block = BLOCK_CHAIN (block);
5944     }
5945 }
5946 
5947 /* Create a block containing landing pads and similar stuff.  */
5948 
5949 static void
5950 construct_exit_block (void)
5951 {
5952   rtx_insn *head = get_last_insn ();
5953   rtx_insn *end;
5954   basic_block exit_block;
5955   edge e, e2;
5956   unsigned ix;
5957   edge_iterator ei;
5958   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5959   rtx_insn *orig_end = BB_END (prev_bb);
5960 
5961   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5962 
5963   /* Make sure the locus is set to the end of the function, so that
5964      epilogue line numbers and warnings are set properly.  */
5965   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5966     input_location = cfun->function_end_locus;
5967 
5968   /* Generate rtl for function exit.  */
5969   expand_function_end ();
5970 
5971   end = get_last_insn ();
5972   if (head == end)
5973     return;
5974   /* While emitting the function end we could move the end of the last basic
5975      block.  */
5976   BB_END (prev_bb) = orig_end;
5977   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5978     head = NEXT_INSN (head);
5979   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5980      bb counting will be confused.  Any instructions before that
5981      label are emitted for the case where PREV_BB falls through into the
5982      exit block, so append those instructions to prev_bb in that case.  */
5983   if (NEXT_INSN (head) != return_label)
5984     {
5985       while (NEXT_INSN (head) != return_label)
5986 	{
5987 	  if (!NOTE_P (NEXT_INSN (head)))
5988 	    BB_END (prev_bb) = NEXT_INSN (head);
5989 	  head = NEXT_INSN (head);
5990 	}
5991     }
5992   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5993   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5994   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5995 
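  /* Redirect all non-abnormal edges that currently enter
     EXIT_BLOCK_PTR_FOR_FN to the newly created exit_block; abnormal
     edges are left untouched.  */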
5996   ix = 0;
5997   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5998     {
5999       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6000       if (!(e->flags & EDGE_ABNORMAL))
6001 	redirect_edge_succ (e, exit_block);
6002       else
6003 	ix++;
6004     }
6005 
6006   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6007 			     EDGE_FALLTHRU);
6008   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6009     if (e2 != e)
6010       {
6011 	exit_block->count -= e2->count ();
6012       }
6013   update_bb_for_insn (exit_block);
6014 }
6015 
6016 /* Helper function for discover_nonconstant_array_refs.
6017    Look for ARRAY_REF nodes with non-constant indexes and mark them
6018    addressable.  */
6019 
6020 static tree
6021 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6022 				   void *data ATTRIBUTE_UNUSED)
6023 {
6024   tree t = *tp;
6025 
6026   if (IS_TYPE_OR_DECL_P (t))
6027     *walk_subtrees = 0;
6028   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6029     {
6030       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6031 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6032 	      && (!TREE_OPERAND (t, 2)
6033 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6034 	     || (TREE_CODE (t) == COMPONENT_REF
6035 		 && (!TREE_OPERAND (t,2)
6036 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6037 	     || TREE_CODE (t) == BIT_FIELD_REF
6038 	     || TREE_CODE (t) == REALPART_EXPR
6039 	     || TREE_CODE (t) == IMAGPART_EXPR
6040 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6041 	     || CONVERT_EXPR_P (t))
6042 	t = TREE_OPERAND (t, 0);
6043 
6044       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6045 	{
6046 	  t = get_base_address (t);
6047 	  if (t && DECL_P (t)
6048               && DECL_MODE (t) != BLKmode)
6049 	    TREE_ADDRESSABLE (t) = 1;
6050 	}
6051 
6052       *walk_subtrees = 0;
6053     }
6054 
6055   return NULL_TREE;
6056 }
6057 
6058 /* RTL expansion is not able to compile array references with variable
6059    offsets for arrays stored in a single register.  Discover such
6060    expressions and mark variables as addressable to avoid this
6061    scenario.  */
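/* For instance, for an access like buf[i] with a non-constant index i,
   where buf is a small array that would otherwise be given a
   single-register (non-BLKmode) mode, buf is marked TREE_ADDRESSABLE so
   it stays in memory and the variable index can be expanded.  */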
6062 
6063 static void
6064 discover_nonconstant_array_refs (void)
6065 {
6066   basic_block bb;
6067   gimple_stmt_iterator gsi;
6068 
6069   FOR_EACH_BB_FN (bb, cfun)
6070     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6071       {
6072 	gimple *stmt = gsi_stmt (gsi);
6073 	if (!is_gimple_debug (stmt))
6074 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6075       }
6076 }
6077 
6078 /* This function sets crtl->args.internal_arg_pointer to a virtual
6079    register if DRAP is needed.  Local register allocator will replace
6080    virtual_incoming_args_rtx with the virtual register.  */
6081 
6082 static void
6083 expand_stack_alignment (void)
6084 {
6085   rtx drap_rtx;
6086   unsigned int preferred_stack_boundary;
6087 
6088   if (! SUPPORTS_STACK_ALIGNMENT)
6089     return;
6090 
6091   if (cfun->calls_alloca
6092       || cfun->has_nonlocal_label
6093       || crtl->has_nonlocal_goto)
6094     crtl->need_drap = true;
6095 
6096   /* Call update_stack_boundary here again to update incoming stack
6097      boundary.  It may set incoming stack alignment to a different
6098      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6099      use the minimum incoming stack alignment to check if it is OK
6100      to perform sibcall optimization since sibcall optimization will
6101      only align the outgoing stack to incoming stack boundary.  */
6102   if (targetm.calls.update_stack_boundary)
6103     targetm.calls.update_stack_boundary ();
6104 
6105   /* The incoming stack frame has to be aligned at least at
6106      parm_stack_boundary.  */
6107   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6108 
6109   /* Update crtl->stack_alignment_estimated and use it later to align
6110      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6111      exceptions since callgraph doesn't collect incoming stack alignment
6112      in this case.  */
6113   if (cfun->can_throw_non_call_exceptions
6114       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6115     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6116   else
6117     preferred_stack_boundary = crtl->preferred_stack_boundary;
6118   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6119     crtl->stack_alignment_estimated = preferred_stack_boundary;
6120   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6121     crtl->stack_alignment_needed = preferred_stack_boundary;
6122 
6123   gcc_assert (crtl->stack_alignment_needed
6124 	      <= crtl->stack_alignment_estimated);
6125 
6126   crtl->stack_realign_needed
6127     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6128   crtl->stack_realign_tried = crtl->stack_realign_needed;
6129 
6130   crtl->stack_realign_processed = true;
6131 
6132   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6133      alignment.  */
6134   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6135   drap_rtx = targetm.calls.get_drap_rtx ();
6136 
6137   /* stack_realign_drap and drap_rtx must match.  */
6138   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6139 
6140   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6141   if (drap_rtx != NULL)
6142     {
6143       crtl->args.internal_arg_pointer = drap_rtx;
6144 
6145       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6146          needed. */
6147       fixup_tail_calls ();
6148     }
6149 }
6150 
6151 
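/* Emit a call to __main, for targets that run static constructors and
   other initialization through __main rather than an init section.  */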
6152 static void
6153 expand_main_function (void)
6154 {
6155 #if (defined(INVOKE__main)				\
6156      || (!defined(HAS_INIT_SECTION)			\
6157 	 && !defined(INIT_SECTION_ASM_OP)		\
6158 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6159   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6160 #endif
6161 }
6162 
6163 
6164 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6165    the beginning of a function to be protected.  */
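/* Conceptually this emits
     <canary slot in the frame> = <guard value, e.g. __stack_chk_guard>;
   preferring the target's stack_protect_set pattern, when it exists, so
   that the guard value is not left behind in a scratch register.  */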
6166 
6167 static void
6168 stack_protect_prologue (void)
6169 {
6170   tree guard_decl = targetm.stack_protect_guard ();
6171   rtx x, y;
6172 
6173   x = expand_normal (crtl->stack_protect_guard);
6174   if (guard_decl)
6175     y = expand_normal (guard_decl);
6176   else
6177     y = const0_rtx;
6178 
6179   /* Allow the target to copy from Y to X without leaking Y into a
6180      register.  */
6181   if (targetm.have_stack_protect_set ())
6182     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6183       {
6184 	emit_insn (insn);
6185 	return;
6186       }
6187 
6188   /* Otherwise do a straight move.  */
6189   emit_move_insn (x, y);
6190 }
6191 
6192 /* Translate the intermediate representation contained in the CFG
6193    from GIMPLE trees to RTL.
6194 
6195    We do conversion per basic block and preserve/update the tree CFG.
6196    This implies we have to do some magic as the CFG can simultaneously
6197    consist of basic blocks containing RTL and GIMPLE trees.  This can
6198    confuse the CFG hooks, so be careful not to manipulate the CFG during
6199    the expansion.  */
6200 
6201 namespace {
6202 
6203 const pass_data pass_data_expand =
6204 {
6205   RTL_PASS, /* type */
6206   "expand", /* name */
6207   OPTGROUP_NONE, /* optinfo_flags */
6208   TV_EXPAND, /* tv_id */
6209   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6210     | PROP_gimple_lcx
6211     | PROP_gimple_lvec
6212     | PROP_gimple_lva), /* properties_required */
6213   PROP_rtl, /* properties_provided */
6214   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6215   0, /* todo_flags_start */
6216   0, /* todo_flags_finish */
6217 };
6218 
6219 class pass_expand : public rtl_opt_pass
6220 {
6221 public:
6222   pass_expand (gcc::context *ctxt)
6223     : rtl_opt_pass (pass_data_expand, ctxt)
6224   {}
6225 
6226   /* opt_pass methods: */
6227   virtual unsigned int execute (function *);
6228 
6229 }; // class pass_expand
6230 
6231 unsigned int
6232 pass_expand::execute (function *fun)
6233 {
6234   basic_block bb, init_block;
6235   edge_iterator ei;
6236   edge e;
6237   rtx_insn *var_seq, *var_ret_seq;
6238   unsigned i;
6239 
6240   timevar_push (TV_OUT_OF_SSA);
6241   rewrite_out_of_ssa (&SA);
6242   timevar_pop (TV_OUT_OF_SSA);
6243   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6244 
6245   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6246     {
6247       gimple_stmt_iterator gsi;
6248       FOR_EACH_BB_FN (bb, cfun)
6249 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6250 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6251 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6252     }
6253 
6254   /* Make sure all values used by the optimization passes have sane
6255      defaults.  */
6256   reg_renumber = 0;
6257 
6258   /* Some backends want to know that we are expanding to RTL.  */
6259   currently_expanding_to_rtl = 1;
6260   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6261   free_dominance_info (CDI_DOMINATORS);
6262 
6263   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6264 
6265   if (chkp_function_instrumented_p (current_function_decl))
6266     chkp_reset_rtl_bounds ();
6267 
6268   insn_locations_init ();
6269   if (!DECL_IS_BUILTIN (current_function_decl))
6270     {
6271       /* Eventually, all FEs should explicitly set function_start_locus.  */
6272       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6273 	set_curr_insn_location
6274 	  (DECL_SOURCE_LOCATION (current_function_decl));
6275       else
6276 	set_curr_insn_location (fun->function_start_locus);
6277     }
6278   else
6279     set_curr_insn_location (UNKNOWN_LOCATION);
6280   prologue_location = curr_insn_location ();
6281 
6282 #ifdef INSN_SCHEDULING
6283   init_sched_attrs ();
6284 #endif
6285 
6286   /* Make sure first insn is a note even if we don't want linenums.
6287      This makes sure the first insn will never be deleted.
6288      Also, final expects a note to appear there.  */
6289   emit_note (NOTE_INSN_DELETED);
6290 
6291   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6292   discover_nonconstant_array_refs ();
6293 
6294   targetm.expand_to_rtl_hook ();
6295   crtl->init_stack_alignment ();
6296   fun->cfg->max_jumptable_ents = 0;
6297 
6298   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6299      of the function section at expansion time to predict the distance of calls.  */
6300   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6301 
6302   /* Expand the variables recorded during gimple lowering.  */
6303   timevar_push (TV_VAR_EXPAND);
6304   start_sequence ();
6305 
6306   var_ret_seq = expand_used_vars ();
6307 
6308   var_seq = get_insns ();
6309   end_sequence ();
6310   timevar_pop (TV_VAR_EXPAND);
6311 
6312   /* Honor stack protection warnings.  */
6313   if (warn_stack_protect)
6314     {
6315       if (fun->calls_alloca)
6316 	warning (OPT_Wstack_protector,
6317 		 "stack protector not protecting local variables: "
6318 		 "variable length buffer");
6319       if (has_short_buffer && !crtl->stack_protect_guard)
6320 	warning (OPT_Wstack_protector,
6321 		 "stack protector not protecting function: "
6322 		 "all local arrays are less than %d bytes long",
6323 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6324     }
6325 
6326   /* Set up parameters and prepare for return, for the function.  */
6327   expand_function_start (current_function_decl);
6328 
6329   /* If we emitted any instructions for setting up the variables,
6330      emit them before the FUNCTION_START note.  */
6331   if (var_seq)
6332     {
6333       emit_insn_before (var_seq, parm_birth_insn);
6334 
6335       /* In expand_function_end we'll insert the alloca save/restore
6336 	 before parm_birth_insn.  We've just inserted an alloca call.
6337 	 Adjust the pointer to match.  */
6338       parm_birth_insn = var_seq;
6339     }
6340 
6341   /* Now propagate the RTL assignment of each partition to the
6342      underlying var of each SSA_NAME.  */
6343   tree name;
6344 
6345   FOR_EACH_SSA_NAME (i, name, cfun)
6346     {
6347       /* We might have generated new SSA names in
6348 	 update_alias_info_with_stack_vars.  They will have a NULL
6349 	 defining statement, and won't be part of the partitioning,
6350 	 so ignore those.  */
6351       if (!SSA_NAME_DEF_STMT (name))
6352 	continue;
6353 
6354       adjust_one_expanded_partition_var (name);
6355     }
6356 
6357   /* Clean up RTL of variables that straddle across multiple
6358      partitions, and check that the rtl of any PARM_DECLs that are not
6359      cleaned up is that of their default defs.  */
6360   FOR_EACH_SSA_NAME (i, name, cfun)
6361     {
6362       int part;
6363 
6364       /* We might have generated new SSA names in
6365 	 update_alias_info_with_stack_vars.  They will have a NULL
6366 	 defining statement, and won't be part of the partitioning,
6367 	 so ignore those.  */
6368       if (!SSA_NAME_DEF_STMT (name))
6369 	continue;
6370       part = var_to_partition (SA.map, name);
6371       if (part == NO_PARTITION)
6372 	continue;
6373 
6374       /* If this decl was marked as living in multiple places, reset
6375 	 this now to NULL.  */
6376       tree var = SSA_NAME_VAR (name);
6377       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6378 	SET_DECL_RTL (var, NULL);
6379       /* Check that the pseudos chosen by assign_parms are those of
6380 	 the corresponding default defs.  */
6381       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6382 	       && (TREE_CODE (var) == PARM_DECL
6383 		   || TREE_CODE (var) == RESULT_DECL))
6384 	{
6385 	  rtx in = DECL_RTL_IF_SET (var);
6386 	  gcc_assert (in);
6387 	  rtx out = SA.partition_to_pseudo[part];
6388 	  gcc_assert (in == out);
6389 
6390 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6391 	     those expected by debug backends for each parm and for
6392 	     the result.  This is particularly important for stabs,
6393 	     whose register elimination from parm's DECL_RTL may cause
6394 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6395 	     attrs.  So, make sure the RTL already has the parm as the
6396 	     EXPR, so that it won't change.  */
6397 	  SET_DECL_RTL (var, NULL_RTX);
6398 	  if (MEM_P (in))
6399 	    set_mem_attributes (in, var, true);
6400 	  SET_DECL_RTL (var, in);
6401 	}
6402     }
6403 
6404   /* If this function is `main', emit a call to `__main'
6405      to run global initializers, etc.  */
6406   if (DECL_NAME (current_function_decl)
6407       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6408       && DECL_FILE_SCOPE_P (current_function_decl))
6409     expand_main_function ();
6410 
6411   /* Initialize the stack_protect_guard field.  This must happen after the
6412      call to __main (if any) so that the external decl is initialized.  */
6413   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6414     stack_protect_prologue ();
6415 
6416   expand_phi_nodes (&SA);
6417 
6418   /* Release any stale SSA redirection data.  */
6419   redirect_edge_var_map_empty ();
6420 
6421   /* Register rtl specific functions for cfg.  */
6422   rtl_register_cfg_hooks ();
6423 
6424   init_block = construct_init_block ();
6425 
6426   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6427      remaining edges later.  */
6428   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6429     e->flags &= ~EDGE_EXECUTABLE;
6430 
6431   /* If the function has too many markers, drop them while expanding.  */
6432   if (cfun->debug_marker_count
6433       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6434     cfun->debug_nonbind_markers = false;
6435 
6436   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6437   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6438 		  next_bb)
6439     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6440 
6441   if (MAY_HAVE_DEBUG_BIND_INSNS)
6442     expand_debug_locations ();
6443 
6444   if (deep_ter_debug_map)
6445     {
6446       delete deep_ter_debug_map;
6447       deep_ter_debug_map = NULL;
6448     }
6449 
6450   /* Free stuff we no longer need after GIMPLE optimizations.  */
6451   free_dominance_info (CDI_DOMINATORS);
6452   free_dominance_info (CDI_POST_DOMINATORS);
6453   delete_tree_cfg_annotations (fun);
6454 
6455   timevar_push (TV_OUT_OF_SSA);
6456   finish_out_of_ssa (&SA);
6457   timevar_pop (TV_OUT_OF_SSA);
6458 
6459   timevar_push (TV_POST_EXPAND);
6460   /* We are no longer in SSA form.  */
6461   fun->gimple_df->in_ssa_p = false;
6462   loops_state_clear (LOOP_CLOSED_SSA);
6463 
6464   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6465      conservatively to true until they are all profile aware.  */
6466   delete lab_rtx_for_bb;
6467   free_histograms (fun);
6468 
6469   construct_exit_block ();
6470   insn_locations_finalize ();
6471 
6472   if (var_ret_seq)
6473     {
6474       rtx_insn *after = return_label;
6475       rtx_insn *next = NEXT_INSN (after);
6476       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6477 	after = next;
6478       emit_insn_after (var_ret_seq, after);
6479     }
6480 
6481   /* Zap the tree EH table.  */
6482   set_eh_throw_stmt_table (fun, NULL);
6483 
6484   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6485      split edges which edge insertions might do.  */
6486   rebuild_jump_labels (get_insns ());
6487 
6488   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6489 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6490     {
6491       edge e;
6492       edge_iterator ei;
6493       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6494 	{
6495 	  if (e->insns.r)
6496 	    {
6497 	      rebuild_jump_labels_chain (e->insns.r);
6498 	      /* Put insns after parm birth, but before
6499 		 NOTE_INSNS_FUNCTION_BEG.  */
6500 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6501 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6502 		{
6503 		  rtx_insn *insns = e->insns.r;
6504 		  e->insns.r = NULL;
6505 		  if (NOTE_P (parm_birth_insn)
6506 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6507 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6508 		  else
6509 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6510 		}
6511 	      else
6512 		commit_one_edge_insertion (e);
6513 	    }
6514 	  else
6515 	    ei_next (&ei);
6516 	}
6517     }
6518 
6519   /* We're done expanding trees to RTL.  */
6520   currently_expanding_to_rtl = 0;
6521 
6522   flush_mark_addressable_queue ();
6523 
6524   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6525 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6526     {
6527       edge e;
6528       edge_iterator ei;
6529       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6530 	{
6531 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6532 	  e->flags &= ~EDGE_EXECUTABLE;
6533 
6534 	  /* At the moment not all abnormal edges match the RTL
6535 	     representation.  It is safe to remove them here as
6536 	     find_many_sub_basic_blocks will rediscover them.
6537 	     In the future we should get this fixed properly.  */
6538 	  if ((e->flags & EDGE_ABNORMAL)
6539 	      && !(e->flags & EDGE_SIBCALL))
6540 	    remove_edge (e);
6541 	  else
6542 	    ei_next (&ei);
6543 	}
6544     }
6545 
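
  /* Expansion may have emitted jumps and labels in the middle of what used
     to be a single GIMPLE block; rediscover the resulting sub-basic blocks
     and remove edges that have become dead.  */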
6546   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6547   bitmap_ones (blocks);
6548   find_many_sub_basic_blocks (blocks);
6549   purge_all_dead_edges ();
6550 
6551   /* After initial rtl generation, call back to finish generating
6552      exception support code.  We need to do this before cleaning up
6553      the CFG as the code does not expect dead landing pads.  */
6554   if (fun->eh->region_tree != NULL)
6555     finish_eh_generation ();
6556 
6557   /* Call expand_stack_alignment after finishing all
6558      updates to crtl->preferred_stack_boundary.  */
6559   expand_stack_alignment ();
6560 
6561   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6562      function.  */
6563   if (crtl->tail_call_emit)
6564     fixup_tail_calls ();
6565 
6566   /* BB subdivision may have created basic blocks that are only reachable
6567      from unlikely bbs but not marked as such in the profile.  */
6568   if (optimize)
6569     propagate_unlikely_bbs_forward ();
6570 
6571   /* Remove unreachable blocks, otherwise we cannot compute dominators
6572      which are needed for loop state verification.  As a side-effect
6573      this also compacts blocks.
6574      ???  We cannot remove trivially dead insns here as for example
6575      the DRAP reg on i?86 is not magically live at this point.
6576      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6577   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6578 
6579   checking_verify_flow_info ();
6580 
6581   /* Initialize pseudos allocated for hard registers.  */
6582   emit_initial_value_sets ();
6583 
6584   /* And finally unshare all RTL.  */
6585   unshare_all_rtl ();
6586 
6587   /* There's no need to defer outputting this function any more; we
6588      know we want to output it.  */
6589   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6590 
6591   /* Now that we're done expanding trees to RTL, we shouldn't have any
6592      more CONCATs anywhere.  */
6593   generating_concat_p = 0;
6594 
6595   if (dump_file)
6596     {
6597       fprintf (dump_file,
6598 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6599       /* And the pass manager will dump RTL for us.  */
6600     }
6601 
6602   /* If we're emitting a nested function, make sure its parent gets
6603      emitted as well.  Doing otherwise confuses debug info.  */
6604     {
6605       tree parent;
6606       for (parent = DECL_CONTEXT (current_function_decl);
6607 	   parent != NULL_TREE;
6608 	   parent = get_containing_scope (parent))
6609 	if (TREE_CODE (parent) == FUNCTION_DECL)
6610 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6611     }
6612 
6613   TREE_ASM_WRITTEN (current_function_decl) = 1;
6614 
6615   /* After expanding, the return labels are no longer needed. */
6616   return_label = NULL;
6617   naked_return_label = NULL;
6618 
6619   /* After expanding, the tm_restart map is no longer needed.  */
6620   if (fun->gimple_df->tm_restart)
6621     fun->gimple_df->tm_restart = NULL;
6622 
6623   /* Tag the blocks with a depth number so that change_scope can find
6624      the common parent easily.  */
6625   set_block_levels (DECL_INITIAL (fun->decl), 0);
6626   default_rtl_profile ();
6627 
6628   /* For -dx discard loops now, otherwise IL verify in clean_state will
6629      ICE.  */
6630   if (rtl_dump_and_exit)
6631     {
6632       cfun->curr_properties &= ~PROP_loops;
6633       loop_optimizer_finalize ();
6634     }
6635 
6636   timevar_pop (TV_POST_EXPAND);
6637 
6638   return 0;
6639 }
6640 
6641 } // anon namespace
6642 
6643 rtl_opt_pass *
6644 make_pass_expand (gcc::context *ctxt)
6645 {
6646   return new pass_expand (ctxt);
6647 }
6648