1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "params.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
69 #include "cfgloop.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
71 #include "stringpool.h"
72 #include "attribs.h"
73 #include "asan.h"
74 #include "tree-ssa-address.h"
75 #include "output.h"
76 #include "builtins.h"
77 #include "tree-chkp.h"
78 #include "rtl-chkp.h"
79 
80 /* Some systems use __main in a way incompatible with its use in GCC; in
81    these cases use the macros NAME__MAIN to give a quoted symbol and
82    SYMBOL__MAIN to give the same symbol without quotes for an alternative
83    entry point.  You must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* This variable holds information helping the rewriting of SSA trees
89    into RTL.  */
90 struct ssaexpand SA;
91 
92 /* This variable holds the currently expanded gimple statement for purposes
93    of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
95 
96 static rtx expand_debug_expr (tree);
97 
98 static bool defer_stack_allocation (tree, bool);
99 
100 static void record_alignment_for_reg_var (unsigned int);
101 
102 /* Return an expression tree corresponding to the RHS of GIMPLE
103    statement STMT.  */
104 
105 tree
106 gimple_assign_rhs_to_tree (gimple *stmt)
107 {
108   tree t;
109   enum gimple_rhs_class grhs_class;
110 
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
112 
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115 		TREE_TYPE (gimple_assign_lhs (stmt)),
116 		gimple_assign_rhs1 (stmt),
117 		gimple_assign_rhs2 (stmt),
118 		gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121 		TREE_TYPE (gimple_assign_lhs (stmt)),
122 		gimple_assign_rhs1 (stmt),
123 		gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126 		TREE_TYPE (gimple_assign_lhs (stmt)),
127 		gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
129     {
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133 	   && gimple_location (stmt) != EXPR_LOCATION (t))
134 	  || (gimple_block (stmt)
135 	      && currently_expanding_to_rtl
136 	      && EXPR_P (t)))
137 	t = copy_node (t);
138     }
139   else
140     gcc_unreachable ();
141 
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
144 
145   return t;
146 }
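
/* Illustrative example (not part of the original sources): for a GIMPLE
   assignment such as "x_1 = a_2 + b_3", gimple_expr_code is PLUS_EXPR,
   which classifies as GIMPLE_BINARY_RHS, so the function above in effect
   builds

     t = build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   and then copies the statement's location onto T.  */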
147 
148 
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
151 #endif
152 
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
154 
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156    Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
157    that arises when the same user variable ends up in multiple partitions
158    (this is less likely for compiler-introduced temps).  */
159 
160 static tree
161 leader_merge (tree cur, tree next)
162 {
163   if (cur == NULL || cur == next)
164     return next;
165 
166   if (DECL_P (cur) && DECL_IGNORED_P (cur))
167     return cur;
168 
169   if (DECL_P (next) && DECL_IGNORED_P (next))
170     return next;
171 
172   return cur;
173 }
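
/* Example with hypothetical decls (illustration only): if CUR is the user
   variable "i" and NEXT is a compiler temporary D.1234 with DECL_IGNORED_P
   set, leader_merge returns D.1234, so debug dumps don't suggest that "i"
   alone owns the partition.  */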
174 
175 /* Associate declaration T with storage space X.  If T is not an
176    SSA name, this is exactly SET_DECL_RTL; otherwise associate the
177    partition of T with X.  */
178 static inline void
179 set_rtl (tree t, rtx x)
180 {
181   gcc_checking_assert (!x
182 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
183 		       || (use_register_for_decl (t)
184 			   ? (REG_P (x)
185 			      || (GET_CODE (x) == CONCAT
186 				  && (REG_P (XEXP (x, 0))
187 				      || SUBREG_P (XEXP (x, 0)))
188 				  && (REG_P (XEXP (x, 1))
189 				      || SUBREG_P (XEXP (x, 1))))
190 			      /* We need to accept PARALLELs for RESULT_DECLs
191 				 because of vector types with BLKmode returned
192 				 in multiple registers, but they are supposed
193 				 to be uncoalesced.  */
194 			      || (GET_CODE (x) == PARALLEL
195 				  && SSAVAR (t)
196 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
197 				  && (GET_MODE (x) == BLKmode
198 				      || !flag_tree_coalesce_vars)))
199 			   : (MEM_P (x) || x == pc_rtx
200 			      || (GET_CODE (x) == CONCAT
201 				  && MEM_P (XEXP (x, 0))
202 				  && MEM_P (XEXP (x, 1))))));
203   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204      RESULT_DECLs has the expected mode.  For memory, we accept
205      unpromoted modes, since that's what we're likely to get.  For
206      PARM_DECLs and RESULT_DECLs, we'll have been called by
207      set_parm_rtl, which will give us the default def, so we don't
208      have to compute it ourselves.  For RESULT_DECLs, we accept mode
209      mismatches too, as long as we have BLKmode or are not coalescing
210      across variables, so that we don't reject BLKmode PARALLELs or
211      unpromoted REGs.  */
212   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
213 		       || (SSAVAR (t)
214 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
215 			   && (promote_ssa_mode (t, NULL) == BLKmode
216 			       || !flag_tree_coalesce_vars))
217 		       || !use_register_for_decl (t)
218 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
219 
220   if (x)
221     {
222       bool skip = false;
223       tree cur = NULL_TREE;
224       rtx xm = x;
225 
226     retry:
227       if (MEM_P (xm))
228 	cur = MEM_EXPR (xm);
229       else if (REG_P (xm))
230 	cur = REG_EXPR (xm);
231       else if (SUBREG_P (xm))
232 	{
233 	  gcc_assert (subreg_lowpart_p (xm));
234 	  xm = SUBREG_REG (xm);
235 	  goto retry;
236 	}
237       else if (GET_CODE (xm) == CONCAT)
238 	{
239 	  xm = XEXP (xm, 0);
240 	  goto retry;
241 	}
242       else if (GET_CODE (xm) == PARALLEL)
243 	{
244 	  xm = XVECEXP (xm, 0, 0);
245 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
246 	  xm = XEXP (xm, 0);
247 	  goto retry;
248 	}
249       else if (xm == pc_rtx)
250 	skip = true;
251       else
252 	gcc_unreachable ();
253 
254       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
255 
256       if (cur != next)
257 	{
258 	  if (MEM_P (x))
259 	    set_mem_attributes (x,
260 				next && TREE_CODE (next) == SSA_NAME
261 				? TREE_TYPE (next)
262 				: next, true);
263 	  else
264 	    set_reg_attrs_for_decl_rtl (next, x);
265 	}
266     }
267 
268   if (TREE_CODE (t) == SSA_NAME)
269     {
270       int part = var_to_partition (SA.map, t);
271       if (part != NO_PARTITION)
272 	{
273 	  if (SA.partition_to_pseudo[part])
274 	    gcc_assert (SA.partition_to_pseudo[part] == x);
275 	  else if (x != pc_rtx)
276 	    SA.partition_to_pseudo[part] = x;
277 	}
278       /* For the benefit of debug information at -O0 (where
279          vartracking doesn't run) record the place also in the base
280          DECL.  For PARMs and RESULTs, do so only when setting the
281          default def.  */
282       if (x && x != pc_rtx && SSA_NAME_VAR (t)
283 	  && (VAR_P (SSA_NAME_VAR (t))
284 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
285 	{
286 	  tree var = SSA_NAME_VAR (t);
287 	  /* If we don't yet have something recorded, just record it now.  */
288 	  if (!DECL_RTL_SET_P (var))
289 	    SET_DECL_RTL (var, x);
290 	  /* If we have it set already to "multiple places" don't
291 	     change this.  */
292 	  else if (DECL_RTL (var) == pc_rtx)
293 	    ;
294 	  /* If we have something recorded and it's not the same place
295 	     as we want to record now, we have multiple partitions for the
296 	     same base variable, with different places.  We can't just
297 	     randomly choose one, hence we have to say that we don't know.
298 	     This only happens with optimization, and there var-tracking
299 	     will figure out the right thing.  */
300 	  else if (DECL_RTL (var) != x)
301 	    SET_DECL_RTL (var, pc_rtx);
302 	}
303     }
304   else
305     SET_DECL_RTL (t, x);
306 }
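
/* Illustration of the base-DECL bookkeeping above (hypothetical scenario):
   with optimization, if two partitions of the same base variable "x" are
   assigned different pseudos, the first set_rtl call records its pseudo in
   DECL_RTL (x), and the second notices the mismatch and downgrades
   DECL_RTL (x) to pc_rtx ("multiple places"), leaving it to var-tracking
   to work out where "x" lives at each point.  */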
307 
308 /* This structure holds data relevant to one variable that will be
309    placed in a stack slot.  */
310 struct stack_var
311 {
312   /* The variable.  */
313   tree decl;
314 
315   /* Initially, the size of the variable.  Later, the size of the partition,
316      if this variable becomes its partition's representative.  */
317   poly_uint64 size;
318 
319   /* The *byte* alignment required for this variable.  Or, as with the
320      size, the alignment for this partition.  */
321   unsigned int alignb;
322 
323   /* The partition representative.  */
324   size_t representative;
325 
326   /* The next stack variable in the partition, or EOC.  */
327   size_t next;
328 
329   /* The indices of conflicting stack variables.  */
330   bitmap conflicts;
331 };
332 
333 #define EOC  ((size_t)-1)
334 
335 /* We have an array of such objects while deciding allocation.  */
336 static struct stack_var *stack_vars;
337 static size_t stack_vars_alloc;
338 static size_t stack_vars_num;
339 static hash_map<tree, size_t> *decl_to_stack_part;
340 
341 /* Conflict bitmaps go on this obstack.  This allows us to destroy
342    all of them in one big sweep.  */
343 static bitmap_obstack stack_var_bitmap_obstack;
344 
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346    is non-decreasing.  */
347 static size_t *stack_vars_sorted;
348 
349 /* The phase of the stack frame.  This is the known misalignment of
350    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
351    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
352 static int frame_phase;
353 
354 /* Used during expand_used_vars to remember if we saw any decls for
355    which we'd like to enable stack smashing protection.  */
356 static bool has_protected_decls;
357 
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
360 static bool has_short_buffer;
361 
362 /* Compute the byte alignment to use for DECL.  Ignore alignment
363    we can't honor given the expected alignment of the stack boundary.  */
364 
365 static unsigned int
366 align_local_variable (tree decl)
367 {
368   unsigned int align;
369 
370   if (TREE_CODE (decl) == SSA_NAME)
371     align = TYPE_ALIGN (TREE_TYPE (decl));
372   else
373     {
374       align = LOCAL_DECL_ALIGNMENT (decl);
375       SET_DECL_ALIGN (decl, align);
376     }
377   return align / BITS_PER_UNIT;
378 }
379 
380 /* Align the given offset BASE to ALIGN.  Round up if ALIGN_UP is true,
381    down otherwise.  Return the aligned BASE value.  */
382 
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
385 {
386   return align_up ? (base + align - 1) & -align : base & -align;
387 }
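
/* A minimal sanity-check sketch for the rounding above; the values are
   hypothetical and the code is deliberately kept out of the build.  */
#if 0
static void
align_base_example (void)
{
  /* Rounding 23 up to a multiple of 8 gives 24; rounding down gives 16.  */
  gcc_checking_assert (align_base (23, 8, true) == 24);
  gcc_checking_assert (align_base (23, 8, false) == 16);
}
#endif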
388 
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390    Return the frame offset.  */
391 
392 static poly_int64
393 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
394 {
395   poly_int64 offset, new_frame_offset;
396 
397   if (FRAME_GROWS_DOWNWARD)
398     {
399       new_frame_offset
400 	= aligned_lower_bound (frame_offset - frame_phase - size,
401 			       align) + frame_phase;
402       offset = new_frame_offset;
403     }
404   else
405     {
406       new_frame_offset
407 	= aligned_upper_bound (frame_offset - frame_phase,
408 			       align) + frame_phase;
409       offset = new_frame_offset;
410       new_frame_offset += size;
411     }
412   frame_offset = new_frame_offset;
413 
414   if (frame_offset_overflow (frame_offset, cfun->decl))
415     frame_offset = offset = 0;
416 
417   return offset;
418 }
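
/* Worked example (illustrative numbers): assuming FRAME_GROWS_DOWNWARD,
   frame_phase == 0 and frame_offset == -16, a request for SIZE == 12 at
   ALIGN == 8 computes aligned_lower_bound (-16 - 12, 8) == -32, so both
   the returned offset and the new frame_offset are -32.  */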
419 
420 /* Accumulate DECL into STACK_VARS.  */
421 
422 static void
423 add_stack_var (tree decl)
424 {
425   struct stack_var *v;
426 
427   if (stack_vars_num >= stack_vars_alloc)
428     {
429       if (stack_vars_alloc)
430 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
431       else
432 	stack_vars_alloc = 32;
433       stack_vars
434 	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
435     }
436   if (!decl_to_stack_part)
437     decl_to_stack_part = new hash_map<tree, size_t>;
438 
439   v = &stack_vars[stack_vars_num];
440   decl_to_stack_part->put (decl, stack_vars_num);
441 
442   v->decl = decl;
443   tree size = TREE_CODE (decl) == SSA_NAME
444     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
445     : DECL_SIZE_UNIT (decl);
446   v->size = tree_to_poly_uint64 (size);
447   /* Ensure that all variables have size, so that &a != &b for any two
448      variables that are simultaneously live.  */
449   if (known_eq (v->size, 0U))
450     v->size = 1;
451   v->alignb = align_local_variable (decl);
452   /* An alignment of zero can mightily confuse us later.  */
453   gcc_assert (v->alignb != 0);
454 
455   /* All variables are initially in their own partition.  */
456   v->representative = stack_vars_num;
457   v->next = EOC;
458 
459   /* All variables initially conflict with no other.  */
460   v->conflicts = NULL;
461 
462   /* Ensure that this decl doesn't get put onto the list twice.  */
463   set_rtl (decl, pc_rtx);
464 
465   stack_vars_num++;
466 }
467 
468 /* Make the decls associated with luid's X and Y conflict.  */
469 
470 static void
471 add_stack_var_conflict (size_t x, size_t y)
472 {
473   struct stack_var *a = &stack_vars[x];
474   struct stack_var *b = &stack_vars[y];
475   if (!a->conflicts)
476     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
477   if (!b->conflicts)
478     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
479   bitmap_set_bit (a->conflicts, y);
480   bitmap_set_bit (b->conflicts, x);
481 }
482 
483 /* Check whether the decls associated with luid's X and Y conflict.  */
484 
485 static bool
486 stack_var_conflict_p (size_t x, size_t y)
487 {
488   struct stack_var *a = &stack_vars[x];
489   struct stack_var *b = &stack_vars[y];
490   if (x == y)
491     return false;
492   /* Partitions containing an SSA name result from gimple registers
493      with things like unsupported modes.  They are top-level and
494      hence conflict with everything else.  */
495   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
496     return true;
497 
498   if (!a->conflicts || !b->conflicts)
499     return false;
500   return bitmap_bit_p (a->conflicts, y);
501 }
502 
503 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
504    enter its partition number into bitmap DATA.  */
505 
506 static bool
507 visit_op (gimple *, tree op, tree, void *data)
508 {
509   bitmap active = (bitmap)data;
510   op = get_base_address (op);
511   if (op
512       && DECL_P (op)
513       && DECL_RTL_IF_SET (op) == pc_rtx)
514     {
515       size_t *v = decl_to_stack_part->get (op);
516       if (v)
517 	bitmap_set_bit (active, *v);
518     }
519   return false;
520 }
521 
522 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
523    record conflicts between it and all currently active other partitions
524    from bitmap DATA.  */
525 
526 static bool
527 visit_conflict (gimple *, tree op, tree, void *data)
528 {
529   bitmap active = (bitmap)data;
530   op = get_base_address (op);
531   if (op
532       && DECL_P (op)
533       && DECL_RTL_IF_SET (op) == pc_rtx)
534     {
535       size_t *v = decl_to_stack_part->get (op);
536       if (v && bitmap_set_bit (active, *v))
537 	{
538 	  size_t num = *v;
539 	  bitmap_iterator bi;
540 	  unsigned i;
541 	  gcc_assert (num < stack_vars_num);
542 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
543 	    add_stack_var_conflict (num, i);
544 	}
545     }
546   return false;
547 }
548 
549 /* Helper routine for add_scope_conflicts, calculating the active partitions
550    at the end of BB, leaving the result in WORK.  We're called to generate
551    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
552    liveness.  */
553 
554 static void
555 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
556 {
557   edge e;
558   edge_iterator ei;
559   gimple_stmt_iterator gsi;
560   walk_stmt_load_store_addr_fn visit;
561 
562   bitmap_clear (work);
563   FOR_EACH_EDGE (e, ei, bb->preds)
564     bitmap_ior_into (work, (bitmap)e->src->aux);
565 
566   visit = visit_op;
567 
568   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
569     {
570       gimple *stmt = gsi_stmt (gsi);
571       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
572     }
573   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576 
577       if (gimple_clobber_p (stmt))
578 	{
579 	  tree lhs = gimple_assign_lhs (stmt);
580 	  size_t *v;
581 	  /* Nested function lowering might introduce LHSs
582 	     that are COMPONENT_REFs.  */
583 	  if (!VAR_P (lhs))
584 	    continue;
585 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
586 	      && (v = decl_to_stack_part->get (lhs)))
587 	    bitmap_clear_bit (work, *v);
588 	}
589       else if (!is_gimple_debug (stmt))
590 	{
591 	  if (for_conflict
592 	      && visit == visit_op)
593 	    {
594 	      /* If this is the first real instruction in this BB we need
595 	         to add conflicts for everything live at this point now.
596 		 Unlike classical liveness for named objects we can't
597 		 rely on seeing a def/use of the names we're interested in.
598 		 There might merely be indirect loads/stores.  We'd not add any
599 		 conflicts for such partitions.  */
600 	      bitmap_iterator bi;
601 	      unsigned i;
602 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
603 		{
604 		  struct stack_var *a = &stack_vars[i];
605 		  if (!a->conflicts)
606 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
607 		  bitmap_ior_into (a->conflicts, work);
608 		}
609 	      visit = visit_conflict;
610 	    }
611 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
612 	}
613     }
614 }
615 
616 /* Generate stack partition conflicts between all partitions that are
617    simultaneously live.  */
618 
619 static void
620 add_scope_conflicts (void)
621 {
622   basic_block bb;
623   bool changed;
624   bitmap work = BITMAP_ALLOC (NULL);
625   int *rpo;
626   int n_bbs;
627 
628   /* We approximate the live range of a stack variable by taking the first
629      mention of its name as starting point(s), and by the end-of-scope
630      death clobber added by gimplify as ending point(s) of the range.
631      This over-approximates if, for instance, an address-taken operation
632      was moved upward without the corresponding dereference also moving up.
633      But it is conservatively correct, as a variable can never hold values
634      before its name is mentioned at least once.
635 
636      We then do a mostly classical bitmap liveness algorithm.  */
637 
638   FOR_ALL_BB_FN (bb, cfun)
639     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
640 
641   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
642   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
643 
644   changed = true;
645   while (changed)
646     {
647       int i;
648       changed = false;
649       for (i = 0; i < n_bbs; i++)
650 	{
651 	  bitmap active;
652 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
653 	  active = (bitmap)bb->aux;
654 	  add_scope_conflicts_1 (bb, work, false);
655 	  if (bitmap_ior_into (active, work))
656 	    changed = true;
657 	}
658     }
659 
660   FOR_EACH_BB_FN (bb, cfun)
661     add_scope_conflicts_1 (bb, work, true);
662 
663   free (rpo);
664   BITMAP_FREE (work);
665   FOR_ALL_BB_FN (bb, cfun)
666     BITMAP_FREE (bb->aux);
667 }
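
/* Sketch of the dataflow solved above (not literal code):

     OUT (bb) = transfer (bb, union over predecessors P of OUT (P))

   where the transfer function adds a partition when its decl is mentioned
   by a real statement and removes it again at the decl's gimple clobber.
   The loop above iterates this to a fixed point in reverse post order and
   only afterwards, in the final pass, records the conflicts.  */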
668 
669 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
670    sorting an array of indices by the properties of the object.  */
671 
672 static int
673 stack_var_cmp (const void *a, const void *b)
674 {
675   size_t ia = *(const size_t *)a;
676   size_t ib = *(const size_t *)b;
677   unsigned int aligna = stack_vars[ia].alignb;
678   unsigned int alignb = stack_vars[ib].alignb;
679   poly_int64 sizea = stack_vars[ia].size;
680   poly_int64 sizeb = stack_vars[ib].size;
681   tree decla = stack_vars[ia].decl;
682   tree declb = stack_vars[ib].decl;
683   bool largea, largeb;
684   unsigned int uida, uidb;
685 
686   /* Primary compare on "large" alignment.  Large comes first.  */
687   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
688   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
689   if (largea != largeb)
690     return (int)largeb - (int)largea;
691 
692   /* Secondary compare on size, decreasing.  */
693   int diff = compare_sizes_for_sort (sizeb, sizea);
694   if (diff != 0)
695     return diff;
696 
697   /* Tertiary compare on true alignment, decreasing.  */
698   if (aligna < alignb)
699     return -1;
700   if (aligna > alignb)
701     return 1;
702 
703   /* Final compare on ID, in decreasing order, for sort stability.
704      Two SSA names are compared by their version, SSA names come before
705      non-SSA names, and two normal decls are compared by their DECL_UID.  */
706   if (TREE_CODE (decla) == SSA_NAME)
707     {
708       if (TREE_CODE (declb) == SSA_NAME)
709 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
710       else
711 	return -1;
712     }
713   else if (TREE_CODE (declb) == SSA_NAME)
714     return 1;
715   else
716     uida = DECL_UID (decla), uidb = DECL_UID (declb);
717   if (uida < uidb)
718     return 1;
719   if (uida > uidb)
720     return -1;
721   return 0;
722 }
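
/* Example of the resulting order (hypothetical variables): an object whose
   required alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts before all
   "small"-alignment objects; among the rest, a 1024-byte array precedes a
   16-byte struct, and two 16-byte objects are ordered by decreasing
   alignment, with IDs breaking any remaining ties deterministically.  */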
723 
724 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
725 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
726 
727 /* If the points-to solution *PI points to variables that are in a partition
728    together with other variables add all partition members to the pointed-to
729    variables bitmap.  */
730 
731 static void
732 add_partitioned_vars_to_ptset (struct pt_solution *pt,
733 			       part_hashmap *decls_to_partitions,
734 			       hash_set<bitmap> *visited, bitmap temp)
735 {
736   bitmap_iterator bi;
737   unsigned i;
738   bitmap *part;
739 
740   if (pt->anything
741       || pt->vars == NULL
742       /* The pointed-to vars bitmap is shared, it is enough to
743 	 visit it once.  */
744       || visited->add (pt->vars))
745     return;
746 
747   bitmap_clear (temp);
748 
749   /* By using a temporary bitmap to store all members of the partitions
750      we have to add we make sure to visit each of the partitions only
751      once.  */
752   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
753     if ((!temp
754 	 || !bitmap_bit_p (temp, i))
755 	&& (part = decls_to_partitions->get (i)))
756       bitmap_ior_into (temp, *part);
757   if (!bitmap_empty_p (temp))
758     bitmap_ior_into (pt->vars, temp);
759 }
760 
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762    The bitmaps representing stack partitions will be saved until expand,
763    where partitioned decls used as bases in memory expressions will be
764    rewritten.  */
765 
766 static void
767 update_alias_info_with_stack_vars (void)
768 {
769   part_hashmap *decls_to_partitions = NULL;
770   size_t i, j;
771   tree var = NULL_TREE;
772 
773   for (i = 0; i < stack_vars_num; i++)
774     {
775       bitmap part = NULL;
776       tree name;
777       struct ptr_info_def *pi;
778 
779       /* Not interested in partitions with a single variable.  */
780       if (stack_vars[i].representative != i
781           || stack_vars[i].next == EOC)
782         continue;
783 
784       if (!decls_to_partitions)
785 	{
786 	  decls_to_partitions = new part_hashmap;
787 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
788 	}
789 
790       /* Create an SSA_NAME that points to the partition for use
791          as base during alias-oracle queries on RTL for bases that
792 	 have been partitioned.  */
793       if (var == NULL_TREE)
794 	var = create_tmp_var (ptr_type_node);
795       name = make_ssa_name (var);
796 
797       /* Create bitmaps representing partitions.  They will be used for
798          points-to sets later, so use GGC alloc.  */
799       part = BITMAP_GGC_ALLOC ();
800       for (j = i; j != EOC; j = stack_vars[j].next)
801 	{
802 	  tree decl = stack_vars[j].decl;
803 	  unsigned int uid = DECL_PT_UID (decl);
804 	  bitmap_set_bit (part, uid);
805 	  decls_to_partitions->put (uid, part);
806 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
807 	  if (TREE_ADDRESSABLE (decl))
808 	    TREE_ADDRESSABLE (name) = 1;
809 	}
810 
811       /* Make the SSA name point to all partition members.  */
812       pi = get_ptr_info (name);
813       pt_solution_set (&pi->pt, part, false);
814     }
815 
816   /* Make all points-to sets that contain one member of a partition
817      contain all members of the partition.  */
818   if (decls_to_partitions)
819     {
820       unsigned i;
821       tree name;
822       hash_set<bitmap> visited;
823       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
824 
825       FOR_EACH_SSA_NAME (i, name, cfun)
826 	{
827 	  struct ptr_info_def *pi;
828 
829 	  if (POINTER_TYPE_P (TREE_TYPE (name))
830 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
831 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
832 					   &visited, temp);
833 	}
834 
835       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
836 				     decls_to_partitions, &visited, temp);
837 
838       delete decls_to_partitions;
839       BITMAP_FREE (temp);
840     }
841 }
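
/* Example (illustrative): if "a" and "b" were coalesced into one partition,
   a fresh artificial SSA pointer is created whose points-to set is { a, b },
   decls_to_pointers maps both decls to that name, and every other points-to
   set that mentioned only "a" is widened to include "b" as well, keeping the
   RTL alias oracle conservative for the shared stack slot.  */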
842 
843 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
844    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
845    Merge them into a single partition A.  */
846 
847 static void
848 union_stack_vars (size_t a, size_t b)
849 {
850   struct stack_var *vb = &stack_vars[b];
851   bitmap_iterator bi;
852   unsigned u;
853 
854   gcc_assert (stack_vars[b].next == EOC);
855    /* Add B to A's partition.  */
856   stack_vars[b].next = stack_vars[a].next;
857   stack_vars[b].representative = a;
858   stack_vars[a].next = b;
859 
860   /* Update the required alignment of partition A to account for B.  */
861   if (stack_vars[a].alignb < stack_vars[b].alignb)
862     stack_vars[a].alignb = stack_vars[b].alignb;
863 
864   /* Update the interference graph and merge the conflicts.  */
865   if (vb->conflicts)
866     {
867       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
868 	add_stack_var_conflict (a, stack_vars[u].representative);
869       BITMAP_FREE (vb->conflicts);
870     }
871 }
872 
873 /* A subroutine of expand_used_vars.  Binpack the variables into
874    partitions constrained by the interference graph.  The overall
875    algorithm used is as follows:
876 
877 	Sort the objects by size in descending order.
878 	For each object A {
879 	  S = size(A)
880 	  O = 0
881 	  loop {
882 	    Look for the largest non-conflicting object B with size <= S.
883 	    UNION (A, B)
884 	  }
885 	}
886 */
887 
888 static void
889 partition_stack_vars (void)
890 {
891   size_t si, sj, n = stack_vars_num;
892 
893   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
894   for (si = 0; si < n; ++si)
895     stack_vars_sorted[si] = si;
896 
897   if (n == 1)
898     return;
899 
900   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
901 
902   for (si = 0; si < n; ++si)
903     {
904       size_t i = stack_vars_sorted[si];
905       unsigned int ialign = stack_vars[i].alignb;
906       poly_int64 isize = stack_vars[i].size;
907 
908       /* Ignore objects that aren't partition representatives. If we
909          see a var that is not a partition representative, it must
910          have been merged earlier.  */
911       if (stack_vars[i].representative != i)
912         continue;
913 
914       for (sj = si + 1; sj < n; ++sj)
915 	{
916 	  size_t j = stack_vars_sorted[sj];
917 	  unsigned int jalign = stack_vars[j].alignb;
918 	  poly_int64 jsize = stack_vars[j].size;
919 
920 	  /* Ignore objects that aren't partition representatives.  */
921 	  if (stack_vars[j].representative != j)
922 	    continue;
923 
924 	  /* Do not mix objects of "small" (supported) alignment
925 	     and "large" (unsupported) alignment.  */
926 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
927 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
928 	    break;
929 
930 	  /* For Address Sanitizer do not mix objects with different
931 	     sizes, as the shorter vars wouldn't be adequately protected.
932 	     Don't do that for "large" (unsupported) alignment objects,
933 	     those aren't protected anyway.  */
934 	  if (asan_sanitize_stack_p ()
935 	      && maybe_ne (isize, jsize)
936 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
937 	    break;
938 
939 	  /* Ignore conflicting objects.  */
940 	  if (stack_var_conflict_p (i, j))
941 	    continue;
942 
943 	  /* UNION the objects, placing J at OFFSET.  */
944 	  union_stack_vars (i, j);
945 	}
946     }
947 
948   update_alias_info_with_stack_vars ();
949 }
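
/* Worked example with hypothetical sizes: given representatives of 400, 200
   and 100 bytes where only the 400- and 200-byte objects conflict, the loop
   first tries to merge the 200-byte object into the 400-byte one (rejected
   by stack_var_conflict_p) and then merges the 100-byte one, ending with the
   partitions {400, 100} and {200}.  */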
950 
951 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
952 
953 static void
954 dump_stack_var_partition (void)
955 {
956   size_t si, i, j, n = stack_vars_num;
957 
958   for (si = 0; si < n; ++si)
959     {
960       i = stack_vars_sorted[si];
961 
962       /* Skip variables that aren't partition representatives, for now.  */
963       if (stack_vars[i].representative != i)
964 	continue;
965 
966       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
967       print_dec (stack_vars[i].size, dump_file);
968       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
969 
970       for (j = i; j != EOC; j = stack_vars[j].next)
971 	{
972 	  fputc ('\t', dump_file);
973 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
974 	}
975       fputc ('\n', dump_file);
976     }
977 }
978 
979 /* Assign rtl to DECL at BASE + OFFSET.  */
980 
981 static void
982 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
983 			 poly_int64 offset)
984 {
985   unsigned align;
986   rtx x;
987 
988   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
989   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
990 
991   x = plus_constant (Pmode, base, offset);
992   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
993 		   ? TYPE_MODE (TREE_TYPE (decl))
994 		   : DECL_MODE (SSAVAR (decl)), x);
995 
996   if (TREE_CODE (decl) != SSA_NAME)
997     {
998       /* Record the alignment we actually gave this decl if it isn't an SSA name.
999          If it is an SSA name, we generate stack slots only accidentally, so the
1000 	 alignment isn't as important; simply use the alignment that is already set.  */
1001       if (base == virtual_stack_vars_rtx)
1002 	offset -= frame_phase;
1003       align = known_alignment (offset);
1004       align *= BITS_PER_UNIT;
1005       if (align == 0 || align > base_align)
1006 	align = base_align;
1007 
1008       /* One would think that we could assert that we're not decreasing
1009 	 alignment here, but (at least) the i386 port does exactly this
1010 	 via the MINIMUM_ALIGNMENT hook.  */
1011 
1012       SET_DECL_ALIGN (decl, align);
1013       DECL_USER_ALIGN (decl) = 0;
1014     }
1015 
1016   set_rtl (decl, x);
1017 }
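
/* Illustrative result (target-dependent, not literal output): for a 4-byte
   local placed 8 bytes below the frame base this builds something like

     (mem/c:SI (plus:DI (reg/f:DI virtual-stack-vars) (const_int -8)))

   and hands it to set_rtl as the variable's location.  */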
1018 
1019 struct stack_vars_data
1020 {
1021   /* Vector of offset pairs, always the end of some padding followed
1022      by the start of the padding that needs Address Sanitizer protection.
1023      The vector is reversed; the highest offset pairs come first.  */
1024   auto_vec<HOST_WIDE_INT> asan_vec;
1025 
1026   /* Vector of partition representative decls in between the paddings.  */
1027   auto_vec<tree> asan_decl_vec;
1028 
1029   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1030   rtx asan_base;
1031 
1032   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1033   unsigned int asan_alignb;
1034 };
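
/* Illustration (assumed flow): for each ASAN-protected partition,
   expand_stack_vars pushes the previous (aligned) frame offset and the
   partition's end offset onto asan_vec and one representative decl onto
   asan_decl_vec; asan_emit_stack_protection later walks these vectors to
   poison the red zones between the recorded offsets.  */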
1035 
1036 /* A subroutine of expand_used_vars.  Give each partition representative
1037    a unique location within the stack frame.  Update each partition member
1038    with that location.  */
1039 
1040 static void
1041 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1042 {
1043   size_t si, i, j, n = stack_vars_num;
1044   poly_uint64 large_size = 0, large_alloc = 0;
1045   rtx large_base = NULL;
1046   unsigned large_align = 0;
1047   bool large_allocation_done = false;
1048   tree decl;
1049 
1050   /* Determine if there are any variables requiring "large" alignment.
1051      Since these are dynamically allocated, we only process these if
1052      no predicate is involved.  */
1053   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1054   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1055     {
1056       /* Find the total size of these variables.  */
1057       for (si = 0; si < n; ++si)
1058 	{
1059 	  unsigned alignb;
1060 
1061 	  i = stack_vars_sorted[si];
1062 	  alignb = stack_vars[i].alignb;
1063 
1064 	  /* All "large" alignment decls come before all "small" alignment
1065 	     decls, but "large" alignment decls are not sorted based on
1066 	     their alignment.  Increase large_align to track the largest
1067 	     required alignment.  */
1068 	  if ((alignb * BITS_PER_UNIT) > large_align)
1069 	    large_align = alignb * BITS_PER_UNIT;
1070 
1071 	  /* Stop when we get to the first decl with "small" alignment.  */
1072 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1073 	    break;
1074 
1075 	  /* Skip variables that aren't partition representatives.  */
1076 	  if (stack_vars[i].representative != i)
1077 	    continue;
1078 
1079 	  /* Skip variables that have already had rtl assigned.  See also
1080 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1081 	  decl = stack_vars[i].decl;
1082 	  if (TREE_CODE (decl) == SSA_NAME
1083 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1084 	      : DECL_RTL (decl) != pc_rtx)
1085 	    continue;
1086 
1087 	  large_size = aligned_upper_bound (large_size, alignb);
1088 	  large_size += stack_vars[i].size;
1089 	}
1090     }
1091 
1092   for (si = 0; si < n; ++si)
1093     {
1094       rtx base;
1095       unsigned base_align, alignb;
1096       poly_int64 offset;
1097 
1098       i = stack_vars_sorted[si];
1099 
1100       /* Skip variables that aren't partition representatives, for now.  */
1101       if (stack_vars[i].representative != i)
1102 	continue;
1103 
1104       /* Skip variables that have already had rtl assigned.  See also
1105 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1106       decl = stack_vars[i].decl;
1107       if (TREE_CODE (decl) == SSA_NAME
1108 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1109 	  : DECL_RTL (decl) != pc_rtx)
1110 	continue;
1111 
1112       /* Check the predicate to see whether this variable should be
1113 	 allocated in this pass.  */
1114       if (pred && !pred (i))
1115 	continue;
1116 
1117       alignb = stack_vars[i].alignb;
1118       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1119 	{
1120 	  base = virtual_stack_vars_rtx;
1121 	  /* ASAN description strings don't yet have a syntax for expressing
1122 	     polynomial offsets.  */
1123 	  HOST_WIDE_INT prev_offset;
1124 	  if (asan_sanitize_stack_p ()
1125 	      && pred
1126 	      && frame_offset.is_constant (&prev_offset)
1127 	      && stack_vars[i].size.is_constant ())
1128 	    {
1129 	      prev_offset = align_base (prev_offset,
1130 					MAX (alignb, ASAN_RED_ZONE_SIZE),
1131 					!FRAME_GROWS_DOWNWARD);
1132 	      tree repr_decl = NULL_TREE;
1133 	      offset
1134 		= alloc_stack_frame_space (stack_vars[i].size
1135 					   + ASAN_RED_ZONE_SIZE,
1136 					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 
1138 	      data->asan_vec.safe_push (prev_offset);
1139 	      /* Allocating a constant amount of space from a constant
1140 		 starting offset must give a constant result.  */
1141 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1142 					.to_constant ());
1143 	      /* Find the best representative of the partition.
1144 		 Prefer decls with a DECL_NAME, and better still those
1145 		 that also satisfy the asan_protect_stack_decl predicate.  */
1146 	      for (j = i; j != EOC; j = stack_vars[j].next)
1147 		if (asan_protect_stack_decl (stack_vars[j].decl)
1148 		    && DECL_NAME (stack_vars[j].decl))
1149 		  {
1150 		    repr_decl = stack_vars[j].decl;
1151 		    break;
1152 		  }
1153 		else if (repr_decl == NULL_TREE
1154 			 && DECL_P (stack_vars[j].decl)
1155 			 && DECL_NAME (stack_vars[j].decl))
1156 		  repr_decl = stack_vars[j].decl;
1157 	      if (repr_decl == NULL_TREE)
1158 		repr_decl = stack_vars[i].decl;
1159 	      data->asan_decl_vec.safe_push (repr_decl);
1160 
1161 	      /* Make sure a representative is unpoisoned if another
1162 		 variable in the partition is handled by
1163 		 use-after-scope sanitization.  */
1164 	      if (asan_handled_variables != NULL
1165 		  && !asan_handled_variables->contains (repr_decl))
1166 		{
1167 		  for (j = i; j != EOC; j = stack_vars[j].next)
1168 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1169 		      break;
1170 		  if (j != EOC)
1171 		    asan_handled_variables->add (repr_decl);
1172 		}
1173 
1174 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1175 	      if (data->asan_base == NULL)
1176 		data->asan_base = gen_reg_rtx (Pmode);
1177 	      base = data->asan_base;
1178 
1179 	      if (!STRICT_ALIGNMENT)
1180 		base_align = crtl->max_used_stack_slot_alignment;
1181 	      else
1182 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1183 				  GET_MODE_ALIGNMENT (SImode)
1184 				  << ASAN_SHADOW_SHIFT);
1185 	    }
1186 	  else
1187 	    {
1188 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1189 	      base_align = crtl->max_used_stack_slot_alignment;
1190 	    }
1191 	}
1192       else
1193 	{
1194 	  /* Large alignment is only processed in the last pass.  */
1195 	  if (pred)
1196 	    continue;
1197 
1198 	  /* If there were any variables requiring "large" alignment, allocate
1199 	     space.  */
1200 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1201 	    {
1202 	      poly_int64 loffset;
1203 	      rtx large_allocsize;
1204 
1205 	      large_allocsize = gen_int_mode (large_size, Pmode);
1206 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1207 	      loffset = alloc_stack_frame_space
1208 		(rtx_to_poly_int64 (large_allocsize),
1209 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1210 	      large_base = get_dynamic_stack_base (loffset, large_align);
1211 	      large_allocation_done = true;
1212 	    }
1213 	  gcc_assert (large_base != NULL);
1214 
1215 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1216 	  offset = large_alloc;
1217 	  large_alloc += stack_vars[i].size;
1218 
1219 	  base = large_base;
1220 	  base_align = large_align;
1221 	}
1222 
1223       /* Create rtl for each variable based on their location within the
1224 	 partition.  */
1225       for (j = i; j != EOC; j = stack_vars[j].next)
1226 	{
1227 	  expand_one_stack_var_at (stack_vars[j].decl,
1228 				   base, base_align,
1229 				   offset);
1230 	}
1231     }
1232 
1233   gcc_assert (known_eq (large_alloc, large_size));
1234 }
1235 
1236 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1237 static poly_uint64
1238 account_stack_vars (void)
1239 {
1240   size_t si, j, i, n = stack_vars_num;
1241   poly_uint64 size = 0;
1242 
1243   for (si = 0; si < n; ++si)
1244     {
1245       i = stack_vars_sorted[si];
1246 
1247       /* Skip variables that aren't partition representatives, for now.  */
1248       if (stack_vars[i].representative != i)
1249 	continue;
1250 
1251       size += stack_vars[i].size;
1252       for (j = i; j != EOC; j = stack_vars[j].next)
1253 	set_rtl (stack_vars[j].decl, NULL);
1254     }
1255   return size;
1256 }
1257 
1258 /* Record the RTL assignment X for the default def of PARM.  */
1259 
1260 extern void
1261 set_parm_rtl (tree parm, rtx x)
1262 {
1263   gcc_assert (TREE_CODE (parm) == PARM_DECL
1264 	      || TREE_CODE (parm) == RESULT_DECL);
1265 
1266   if (x && !MEM_P (x))
1267     {
1268       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1269 					      TYPE_MODE (TREE_TYPE (parm)),
1270 					      TYPE_ALIGN (TREE_TYPE (parm)));
1271 
1272       /* If the variable alignment is very large we'll dynamically
1273 	 allocate it, which means that in-frame portion is just a
1274 	 pointer.  ??? We've got a pseudo for sure here, do we
1275 	 actually dynamically allocate its spilling area if needed?
1276 	 ??? Isn't it a problem when Pmode alignment also exceeds
1277 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1278       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279 	align = GET_MODE_ALIGNMENT (Pmode);
1280 
1281       record_alignment_for_reg_var (align);
1282     }
1283 
1284   tree ssa = ssa_default_def (cfun, parm);
1285   if (!ssa)
1286     return set_rtl (parm, x);
1287 
1288   int part = var_to_partition (SA.map, ssa);
1289   gcc_assert (part != NO_PARTITION);
1290 
1291   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1292   gcc_assert (changed);
1293 
1294   set_rtl (ssa, x);
1295   gcc_assert (DECL_RTL (parm) == x);
1296 }
1297 
1298 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1299    to a variable to be allocated in the stack frame.  */
1300 
1301 static void
1302 expand_one_stack_var_1 (tree var)
1303 {
1304   poly_uint64 size;
1305   poly_int64 offset;
1306   unsigned byte_align;
1307 
1308   if (TREE_CODE (var) == SSA_NAME)
1309     {
1310       tree type = TREE_TYPE (var);
1311       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1312       byte_align = TYPE_ALIGN_UNIT (type);
1313     }
1314   else
1315     {
1316       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1317       byte_align = align_local_variable (var);
1318     }
1319 
1320   /* We handle highly aligned variables in expand_stack_vars.  */
1321   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1322 
1323   offset = alloc_stack_frame_space (size, byte_align);
1324 
1325   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1326 			   crtl->max_used_stack_slot_alignment, offset);
1327 }
1328 
1329 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1330    already assigned some MEM.  */
1331 
1332 static void
1333 expand_one_stack_var (tree var)
1334 {
1335   if (TREE_CODE (var) == SSA_NAME)
1336     {
1337       int part = var_to_partition (SA.map, var);
1338       if (part != NO_PARTITION)
1339 	{
1340 	  rtx x = SA.partition_to_pseudo[part];
1341 	  gcc_assert (x);
1342 	  gcc_assert (MEM_P (x));
1343 	  return;
1344 	}
1345     }
1346 
1347   return expand_one_stack_var_1 (var);
1348 }
1349 
1350 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1351    that will reside in a hard register.  */
1352 
1353 static void
1354 expand_one_hard_reg_var (tree var)
1355 {
1356   rest_of_decl_compilation (var, 0, 0);
1357 }
1358 
1359 /* Record the alignment requirements of some variable assigned to a
1360    pseudo.  */
1361 
1362 static void
1363 record_alignment_for_reg_var (unsigned int align)
1364 {
1365   if (SUPPORTS_STACK_ALIGNMENT
1366       && crtl->stack_alignment_estimated < align)
1367     {
1368       /* stack_alignment_estimated shouldn't change after the stack
1369          realign decision has been made.  */
1370       gcc_assert (!crtl->stack_realign_processed);
1371       crtl->stack_alignment_estimated = align;
1372     }
1373 
1374   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1375      So here we only make sure stack_alignment_needed >= align.  */
1376   if (crtl->stack_alignment_needed < align)
1377     crtl->stack_alignment_needed = align;
1378   if (crtl->max_used_stack_slot_alignment < align)
1379     crtl->max_used_stack_slot_alignment = align;
1380 }
1381 
1382 /* Create RTL for an SSA partition.  */
1383 
1384 static void
1385 expand_one_ssa_partition (tree var)
1386 {
1387   int part = var_to_partition (SA.map, var);
1388   gcc_assert (part != NO_PARTITION);
1389 
1390   if (SA.partition_to_pseudo[part])
1391     return;
1392 
1393   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1394 					  TYPE_MODE (TREE_TYPE (var)),
1395 					  TYPE_ALIGN (TREE_TYPE (var)));
1396 
1397   /* If the variable alignment is very large we'll dynamically allocate
1398      it, which means that in-frame portion is just a pointer.  */
1399   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1400     align = GET_MODE_ALIGNMENT (Pmode);
1401 
1402   record_alignment_for_reg_var (align);
1403 
1404   if (!use_register_for_decl (var))
1405     {
1406       if (defer_stack_allocation (var, true))
1407 	add_stack_var (var);
1408       else
1409 	expand_one_stack_var_1 (var);
1410       return;
1411     }
1412 
1413   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1414   rtx x = gen_reg_rtx (reg_mode);
1415 
1416   set_rtl (var, x);
1417 
1418   /* For a promoted variable, X will not be used directly but wrapped in a
1419      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1420      will assume that its upper bits can be inferred from its lower bits.
1421      Therefore, if X isn't initialized on every path from the entry, then
1422      we must do it manually in order to fulfill the above assumption.  */
1423   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1424       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1425     emit_move_insn (x, CONST0_RTX (reg_mode));
1426 }
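
/* Example (target-dependent, for illustration): on a target that promotes
   QImode values to word-sized registers, a partition of a "char" SSA name
   gets a word-mode pseudo here; if some path can reach a use without a
   definition, the CONST0_RTX store above guarantees that the upper bits a
   promoted SUBREG later relies on are actually present.  */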
1427 
1428 /* Record the association between the RTL generated for partition PART
1429    and the underlying variable of the SSA_NAME VAR.  */
1430 
1431 static void
1432 adjust_one_expanded_partition_var (tree var)
1433 {
1434   if (!var)
1435     return;
1436 
1437   tree decl = SSA_NAME_VAR (var);
1438 
1439   int part = var_to_partition (SA.map, var);
1440   if (part == NO_PARTITION)
1441     return;
1442 
1443   rtx x = SA.partition_to_pseudo[part];
1444 
1445   gcc_assert (x);
1446 
1447   set_rtl (var, x);
1448 
1449   if (!REG_P (x))
1450     return;
1451 
1452   /* Note if the object is a user variable.  */
1453   if (decl && !DECL_ARTIFICIAL (decl))
1454     mark_user_reg (x);
1455 
1456   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1457     mark_reg_pointer (x, get_pointer_alignment (var));
1458 }
1459 
1460 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1461    that will reside in a pseudo register.  */
1462 
1463 static void
1464 expand_one_register_var (tree var)
1465 {
1466   if (TREE_CODE (var) == SSA_NAME)
1467     {
1468       int part = var_to_partition (SA.map, var);
1469       if (part != NO_PARTITION)
1470 	{
1471 	  rtx x = SA.partition_to_pseudo[part];
1472 	  gcc_assert (x);
1473 	  gcc_assert (REG_P (x));
1474 	  return;
1475 	}
1476       gcc_unreachable ();
1477     }
1478 
1479   tree decl = var;
1480   tree type = TREE_TYPE (decl);
1481   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1482   rtx x = gen_reg_rtx (reg_mode);
1483 
1484   set_rtl (var, x);
1485 
1486   /* Note if the object is a user variable.  */
1487   if (!DECL_ARTIFICIAL (decl))
1488     mark_user_reg (x);
1489 
1490   if (POINTER_TYPE_P (type))
1491     mark_reg_pointer (x, get_pointer_alignment (var));
1492 }
1493 
1494 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1495    has some associated error, e.g. its type is error-mark.  We just need
1496    to pick something that won't crash the rest of the compiler.  */
1497 
1498 static void
1499 expand_one_error_var (tree var)
1500 {
1501   machine_mode mode = DECL_MODE (var);
1502   rtx x;
1503 
1504   if (mode == BLKmode)
1505     x = gen_rtx_MEM (BLKmode, const0_rtx);
1506   else if (mode == VOIDmode)
1507     x = const0_rtx;
1508   else
1509     x = gen_reg_rtx (mode);
1510 
1511   SET_DECL_RTL (var, x);
1512 }
1513 
1514 /* A subroutine of expand_one_var.  VAR is a variable that will be
1515    allocated to the local stack frame.  Return true if we wish to
1516    add VAR to STACK_VARS so that it will be coalesced with other
1517    variables.  Return false to allocate VAR immediately.
1518 
1519    This function is used to reduce the number of variables considered
1520    for coalescing, which reduces the size of the quadratic problem.  */
1521 
1522 static bool
1523 defer_stack_allocation (tree var, bool toplevel)
1524 {
1525   tree size_unit = TREE_CODE (var) == SSA_NAME
1526     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1527     : DECL_SIZE_UNIT (var);
1528   poly_uint64 size;
1529 
1530   /* Whether the variable is small enough for immediate allocation not to be
1531      a problem with regard to the frame size.  */
1532   bool smallish
1533     = (poly_int_tree_p (size_unit, &size)
1534        && (estimated_poly_value (size)
1535 	   < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1536 
1537   /* If stack protection is enabled, *all* stack variables must be deferred,
1538      so that we can re-order the strings to the top of the frame.
1539      Similarly for Address Sanitizer.  */
1540   if (flag_stack_protect || asan_sanitize_stack_p ())
1541     return true;
1542 
1543   unsigned int align = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_ALIGN (TREE_TYPE (var))
1545     : DECL_ALIGN (var);
1546 
1547   /* We handle "large" alignment via dynamic allocation.  We want to handle
1548      this extra complication in only one place, so defer them.  */
1549   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1550     return true;
1551 
1552   bool ignored = TREE_CODE (var) == SSA_NAME
1553     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1554     : DECL_IGNORED_P (var);
1555 
1556   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1557      might be detached from their block and appear at toplevel when we reach
1558      here.  We want to coalesce them with variables from other blocks when
1559      the immediate contribution to the frame size would be noticeable.  */
1560   if (toplevel && optimize > 0 && ignored && !smallish)
1561     return true;
1562 
1563   /* Variables declared in the outermost scope automatically conflict
1564      with every other variable.  The only reason to want to defer them
1565      at all is that, after sorting, we can more efficiently pack
1566      small variables in the stack frame.  Continue to defer at -O2.  */
1567   if (toplevel && optimize < 2)
1568     return false;
1569 
1570   /* Without optimization, *most* variables are allocated from the
1571      stack, which makes the quadratic problem large exactly when we
1572      want compilation to proceed as quickly as possible.  On the
1573      other hand, we don't want the function's stack frame size to
1574      get completely out of hand.  So we avoid adding scalars and
1575      "small" aggregates to the list at all.  */
1576   if (optimize == 0 && smallish)
1577     return false;
1578 
1579   return true;
1580 }
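
/* Example decisions (illustrative): with -fstack-protector or ASAN every
   stack variable is deferred; without optimization, a scalar smaller than
   the stack-sharing size parameter is allocated a slot immediately; at -O2
   most variables are deferred so that non-conflicting locals can share
   frame space.  */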
1581 
1582 /* A subroutine of expand_used_vars.  Expand one variable according to
1583    its flavor.  Variables to be placed on the stack are not actually
1584    expanded yet, merely recorded.
1585    When REALLY_EXPAND is false, only add stack values to be allocated.
1586    Return stack usage this variable is supposed to take.
1587 */
1588 
1589 static poly_uint64
1590 expand_one_var (tree var, bool toplevel, bool really_expand)
1591 {
1592   unsigned int align = BITS_PER_UNIT;
1593   tree origvar = var;
1594 
1595   var = SSAVAR (var);
1596 
1597   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1598     {
1599       if (is_global_var (var))
1600 	return 0;
1601 
1602       /* Because we don't know if VAR will be in register or on stack,
1603 	 we conservatively assume it will be on stack even if VAR is
1604 	 eventually put into register after RA pass.  For non-automatic
1605 	 variables, which won't be on stack, we collect alignment of
1606 	 type and ignore user specified alignment.  Similarly for
1607 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1608       if (TREE_STATIC (var)
1609 	  || DECL_EXTERNAL (var)
1610 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1611 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1612 				   TYPE_MODE (TREE_TYPE (var)),
1613 				   TYPE_ALIGN (TREE_TYPE (var)));
1614       else if (DECL_HAS_VALUE_EXPR_P (var)
1615 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1616 	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1617 	   or variables which were assigned a stack slot already by
1618 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1619 	   changed from the offset chosen to it.  */
1620 	align = crtl->stack_alignment_estimated;
1621       else
1622 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1623 
1624       /* If the variable alignment is very large we'll dynamically allocate
1625 	 it, which means that in-frame portion is just a pointer.  */
1626       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1627 	align = GET_MODE_ALIGNMENT (Pmode);
1628     }
1629 
1630   record_alignment_for_reg_var (align);
1631 
1632   poly_uint64 size;
1633   if (TREE_CODE (origvar) == SSA_NAME)
1634     {
1635       gcc_assert (!VAR_P (var)
1636 		  || (!DECL_EXTERNAL (var)
1637 		      && !DECL_HAS_VALUE_EXPR_P (var)
1638 		      && !TREE_STATIC (var)
1639 		      && TREE_TYPE (var) != error_mark_node
1640 		      && !DECL_HARD_REGISTER (var)
1641 		      && really_expand));
1642     }
1643   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1644     ;
1645   else if (DECL_EXTERNAL (var))
1646     ;
1647   else if (DECL_HAS_VALUE_EXPR_P (var))
1648     ;
1649   else if (TREE_STATIC (var))
1650     ;
1651   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1652     ;
1653   else if (TREE_TYPE (var) == error_mark_node)
1654     {
1655       if (really_expand)
1656         expand_one_error_var (var);
1657     }
1658   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1659     {
1660       if (really_expand)
1661 	{
1662 	  expand_one_hard_reg_var (var);
1663 	  if (!DECL_HARD_REGISTER (var))
1664 	    /* Invalid register specification.  */
1665 	    expand_one_error_var (var);
1666 	}
1667     }
1668   else if (use_register_for_decl (var))
1669     {
1670       if (really_expand)
1671         expand_one_register_var (origvar);
1672     }
1673   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1674 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1675     {
1676       /* Reject variables which cover more than half of the address-space.  */
1677       if (really_expand)
1678 	{
1679 	  error ("size of variable %q+D is too large", var);
1680 	  expand_one_error_var (var);
1681 	}
1682     }
1683   else if (defer_stack_allocation (var, toplevel))
1684     add_stack_var (origvar);
1685   else
1686     {
1687       if (really_expand)
1688         {
1689           if (lookup_attribute ("naked",
1690                                 DECL_ATTRIBUTES (current_function_decl)))
1691             error ("cannot allocate stack for variable %q+D, naked function",
1692                    var);
1693 
1694           expand_one_stack_var (origvar);
1695         }
1696       return size;
1697     }
1698   return 0;
1699 }
1700 
1701 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1702    expanding variables.  Those variables that can be put into registers
1703    are allocated pseudos; those that can't are put on the stack.
1704 
1705    TOPLEVEL is true if this is the outermost BLOCK.  */
1706 
1707 static void
1708 expand_used_vars_for_block (tree block, bool toplevel)
1709 {
1710   tree t;
1711 
1712   /* Expand all variables at this level.  */
1713   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1714     if (TREE_USED (t)
1715         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1716 	    || !DECL_NONSHAREABLE (t)))
1717       expand_one_var (t, toplevel, true);
1718 
1719   /* Expand all variables at containing levels.  */
1720   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1721     expand_used_vars_for_block (t, false);
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    and clear TREE_USED on all local variables.  */
1726 
1727 static void
1728 clear_tree_used (tree block)
1729 {
1730   tree t;
1731 
1732   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1733     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1734     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1735 	|| !DECL_NONSHAREABLE (t))
1736       TREE_USED (t) = 0;
1737 
1738   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1739     clear_tree_used (t);
1740 }
1741 
1742 enum {
1743   SPCT_FLAG_DEFAULT = 1,
1744   SPCT_FLAG_ALL = 2,
1745   SPCT_FLAG_STRONG = 3,
1746   SPCT_FLAG_EXPLICIT = 4
1747 };
1748 
1749 /* Examine TYPE and determine a bit mask of the following features.  */
1750 
1751 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1752 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1753 #define SPCT_HAS_ARRAY			4
1754 #define SPCT_HAS_AGGREGATE		8
1755 
1756 static unsigned int
1757 stack_protect_classify_type (tree type)
1758 {
1759   unsigned int ret = 0;
1760   tree t;
1761 
1762   switch (TREE_CODE (type))
1763     {
1764     case ARRAY_TYPE:
1765       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1766       if (t == char_type_node
1767 	  || t == signed_char_type_node
1768 	  || t == unsigned_char_type_node)
1769 	{
1770 	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1771 	  unsigned HOST_WIDE_INT len;
1772 
1773 	  if (!TYPE_SIZE_UNIT (type)
1774 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1775 	    len = max;
1776 	  else
1777 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1778 
1779 	  if (len < max)
1780 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1781 	  else
1782 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1783 	}
1784       else
1785 	ret = SPCT_HAS_ARRAY;
1786       break;
1787 
1788     case UNION_TYPE:
1789     case QUAL_UNION_TYPE:
1790     case RECORD_TYPE:
1791       ret = SPCT_HAS_AGGREGATE;
1792       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1793 	if (TREE_CODE (t) == FIELD_DECL)
1794 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1795       break;
1796 
1797     default:
1798       break;
1799     }
1800 
1801   return ret;
1802 }
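
/* For illustration, with the default --param ssp-buffer-size=8 the
   classification above behaves as follows for some hypothetical local
   declarations:

     char small[4];                    -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char big[64];                     -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int  ints[16];                    -> SPCT_HAS_ARRAY
     struct { int i; char b[64]; } s;  -> SPCT_HAS_AGGREGATE
					  | SPCT_HAS_LARGE_CHAR_ARRAY
					  | SPCT_HAS_ARRAY  */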
1803 
1804 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1805    part of the local stack frame.  Remember if we ever return nonzero for
1806    any variable in this function.  The return value is the phase number in
1807    which the variable should be allocated.  */
1808 
1809 static int
1810 stack_protect_decl_phase (tree decl)
1811 {
1812   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1813   int ret = 0;
1814 
1815   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1816     has_short_buffer = true;
1817 
1818   if (flag_stack_protect == SPCT_FLAG_ALL
1819       || flag_stack_protect == SPCT_FLAG_STRONG
1820       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1821 	  && lookup_attribute ("stack_protect",
1822 			       DECL_ATTRIBUTES (current_function_decl))))
1823     {
1824       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1825 	  && !(bits & SPCT_HAS_AGGREGATE))
1826 	ret = 1;
1827       else if (bits & SPCT_HAS_ARRAY)
1828 	ret = 2;
1829     }
1830   else
1831     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1832 
1833   if (ret)
1834     has_protected_decls = true;
1835 
1836   return ret;
1837 }
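
/* For illustration: with -fstack-protector-all or -fstack-protector-strong,
   "char buf[16];" is phase 1 (character array, no enclosing aggregate),
   "int a[4];" is phase 2 (an array that is not a character array) and a
   plain scalar is phase 0; with plain -fstack-protector only variables
   whose type contains a large character array end up in phase 1.  */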
1838 
1839 /* Two helper routines that check for phase 1 and phase 2.  These are used
1840    as callbacks for expand_stack_vars.  */
1841 
1842 static bool
1843 stack_protect_decl_phase_1 (size_t i)
1844 {
1845   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1846 }
1847 
1848 static bool
1849 stack_protect_decl_phase_2 (size_t i)
1850 {
1851   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1852 }
1853 
1854 /* A helper function that checks for the asan phase (phase 3 when the
1855    stack protector is in use).  This is used as a callback for expand_stack_vars.
1856    Returns true if any of the vars in the partition need to be protected.  */
1857 
1858 static bool
1859 asan_decl_phase_3 (size_t i)
1860 {
1861   while (i != EOC)
1862     {
1863       if (asan_protect_stack_decl (stack_vars[i].decl))
1864 	return true;
1865       i = stack_vars[i].next;
1866     }
1867   return false;
1868 }
1869 
1870 /* Ensure that variables in different stack protection phases conflict
1871    so that they are not merged and share the same stack slot.  */
1872 
1873 static void
1874 add_stack_protection_conflicts (void)
1875 {
1876   size_t i, j, n = stack_vars_num;
1877   unsigned char *phase;
1878 
1879   phase = XNEWVEC (unsigned char, n);
1880   for (i = 0; i < n; ++i)
1881     phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1882 
1883   for (i = 0; i < n; ++i)
1884     {
1885       unsigned char ph_i = phase[i];
1886       for (j = i + 1; j < n; ++j)
1887 	if (ph_i != phase[j])
1888 	  add_stack_var_conflict (i, j);
1889     }
1890 
1891   XDELETEVEC (phase);
1892 }
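
/* For example, if the recorded phases were { 1, 2, 1 }, the loops above add
   conflicts for the pairs (0,1) and (1,2) but not (0,2), so the two phase-1
   variables may still share a stack slot while neither can be merged with
   the phase-2 variable.  */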
1893 
1894 /* Create a decl for the guard at the top of the stack frame.  */
1895 
1896 static void
1897 create_stack_guard (void)
1898 {
1899   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1900 			   VAR_DECL, NULL, ptr_type_node);
1901   TREE_THIS_VOLATILE (guard) = 1;
1902   TREE_USED (guard) = 1;
1903   expand_one_stack_var (guard);
1904   crtl->stack_protect_guard = guard;
1905 }
1906 
1907 /* Prepare for expanding variables.  */
1908 static void
1909 init_vars_expansion (void)
1910 {
1911   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1912   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1913 
1914   /* A map from decl to stack partition.  */
1915   decl_to_stack_part = new hash_map<tree, size_t>;
1916 
1917   /* Initialize local stack smashing state.  */
1918   has_protected_decls = false;
1919   has_short_buffer = false;
1920 }
1921 
1922 /* Free up stack variable graph data.  */
1923 static void
1924 fini_vars_expansion (void)
1925 {
1926   bitmap_obstack_release (&stack_var_bitmap_obstack);
1927   if (stack_vars)
1928     XDELETEVEC (stack_vars);
1929   if (stack_vars_sorted)
1930     XDELETEVEC (stack_vars_sorted);
1931   stack_vars = NULL;
1932   stack_vars_sorted = NULL;
1933   stack_vars_alloc = stack_vars_num = 0;
1934   delete decl_to_stack_part;
1935   decl_to_stack_part = NULL;
1936 }
1937 
1938 /* Make a fair guess for the size of the stack frame of the function
1939    in NODE.  This doesn't have to be exact; the result is only used in
1940    the inline heuristics.  So we don't want to run the full stack var
1941    packing algorithm (which is quadratic in the number of stack vars).
1942    Instead, we calculate the total size of all stack vars.  This turns
1943    out to be a pretty fair estimate -- packing of stack vars doesn't
1944    happen very often.  */
1945 
1946 HOST_WIDE_INT
1947 estimated_stack_frame_size (struct cgraph_node *node)
1948 {
1949   poly_int64 size = 0;
1950   size_t i;
1951   tree var;
1952   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1953 
1954   push_cfun (fn);
1955 
1956   init_vars_expansion ();
1957 
1958   FOR_EACH_LOCAL_DECL (fn, i, var)
1959     if (auto_var_in_fn_p (var, fn->decl))
1960       size += expand_one_var (var, true, false);
1961 
1962   if (stack_vars_num > 0)
1963     {
1964       /* Fake sorting the stack vars for account_stack_vars ().  */
1965       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1966       for (i = 0; i < stack_vars_num; ++i)
1967 	stack_vars_sorted[i] = i;
1968       size += account_stack_vars ();
1969     }
1970 
1971   fini_vars_expansion ();
1972   pop_cfun ();
1973   return estimated_poly_value (size);
1974 }
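
/* For example, a function with two local arrays of 100 and 200 bytes whose
   lifetimes never overlap is still estimated at roughly 300 bytes here,
   even though the real packing done later might let them share a slot;
   that over-estimate is acceptable for the inline heuristics.  */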
1975 
1976 /* Helper routine to check if a record or union contains an array field. */
1977 
1978 static int
1979 record_or_union_type_has_array_p (const_tree tree_type)
1980 {
1981   tree fields = TYPE_FIELDS (tree_type);
1982   tree f;
1983 
1984   for (f = fields; f; f = DECL_CHAIN (f))
1985     if (TREE_CODE (f) == FIELD_DECL)
1986       {
1987 	tree field_type = TREE_TYPE (f);
1988 	if (RECORD_OR_UNION_TYPE_P (field_type)
1989 	    && record_or_union_type_has_array_p (field_type))
1990 	  return 1;
1991 	if (TREE_CODE (field_type) == ARRAY_TYPE)
1992 	  return 1;
1993       }
1994   return 0;
1995 }
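
/* For example, "struct outer { struct inner { int a[4]; } i; };" yields 1
   because a nested field has ARRAY_TYPE, while a record containing only
   scalars and pointers yields 0.  */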
1996 
1997 /* Check if the current function has local referenced variables that
1998    have their addresses taken, contain an array, or are arrays.  */
1999 
2000 static bool
2001 stack_protect_decl_p ()
2002 {
2003   unsigned i;
2004   tree var;
2005 
2006   FOR_EACH_LOCAL_DECL (cfun, i, var)
2007     if (!is_global_var (var))
2008       {
2009 	tree var_type = TREE_TYPE (var);
2010 	if (VAR_P (var)
2011 	    && (TREE_CODE (var_type) == ARRAY_TYPE
2012 		|| TREE_ADDRESSABLE (var)
2013 		|| (RECORD_OR_UNION_TYPE_P (var_type)
2014 		    && record_or_union_type_has_array_p (var_type))))
2015 	  return true;
2016       }
2017   return false;
2018 }
2019 
2020 /* Check if the current function has calls that use a return slot.  */
2021 
2022 static bool
2023 stack_protect_return_slot_p ()
2024 {
2025   basic_block bb;
2026 
2027   FOR_ALL_BB_FN (bb, cfun)
2028     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2029 	 !gsi_end_p (gsi); gsi_next (&gsi))
2030       {
2031 	gimple *stmt = gsi_stmt (gsi);
2032 	/* This assumes that calls to internal-only functions never
2033 	   use a return slot.  */
2034 	if (is_gimple_call (stmt)
2035 	    && !gimple_call_internal_p (stmt)
2036 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2037 				  gimple_call_fndecl (stmt)))
2038 	  return true;
2039       }
2040   return false;
2041 }
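
/* For illustration: on a target where a type such as
   "struct big { char c[128]; };" is returned in memory, the statement
   "x = f ();" with "struct big f (void);" is a call that uses a return
   slot, so this predicate returns true for the function containing it.  */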
2042 
2043 /* Expand all variables used in the function.  */
2044 
2045 static rtx_insn *
2046 expand_used_vars (void)
2047 {
2048   tree var, outer_block = DECL_INITIAL (current_function_decl);
2049   auto_vec<tree> maybe_local_decls;
2050   rtx_insn *var_end_seq = NULL;
2051   unsigned i;
2052   unsigned len;
2053   bool gen_stack_protect_signal = false;
2054 
2055   /* Compute the phase of the stack frame for this function.  */
2056   {
2057     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2058     int off = targetm.starting_frame_offset () % align;
2059     frame_phase = off ? align - off : 0;
2060   }
2061 
2062   /* Set TREE_USED on all variables in the local_decls.  */
2063   FOR_EACH_LOCAL_DECL (cfun, i, var)
2064     TREE_USED (var) = 1;
2065   /* Clear TREE_USED on all variables associated with a block scope.  */
2066   clear_tree_used (DECL_INITIAL (current_function_decl));
2067 
2068   init_vars_expansion ();
2069 
2070   if (targetm.use_pseudo_pic_reg ())
2071     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2072 
2073   for (i = 0; i < SA.map->num_partitions; i++)
2074     {
2075       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2076 	continue;
2077 
2078       tree var = partition_to_var (SA.map, i);
2079 
2080       gcc_assert (!virtual_operand_p (var));
2081 
2082       expand_one_ssa_partition (var);
2083     }
2084 
2085   if (flag_stack_protect == SPCT_FLAG_STRONG)
2086       gen_stack_protect_signal
2087 	= stack_protect_decl_p () || stack_protect_return_slot_p ();
2088 
2089   /* At this point all variables on the local_decls with TREE_USED
2090      set are not associated with any block scope.  Lay them out.  */
2091 
2092   len = vec_safe_length (cfun->local_decls);
2093   FOR_EACH_LOCAL_DECL (cfun, i, var)
2094     {
2095       bool expand_now = false;
2096 
2097       /* Expanded above already.  */
2098       if (is_gimple_reg (var))
2099 	{
2100 	  TREE_USED (var) = 0;
2101 	  goto next;
2102 	}
2103       /* We didn't set a block for static or extern because it's hard
2104 	 to tell the difference between a global variable (re)declared
2105 	 in a local scope, and one that's really declared there to
2106 	 begin with.  And it doesn't really matter much, since we're
2107 	 not giving them stack space.  Expand them now.  */
2108       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2109 	expand_now = true;
2110 
2111       /* Expand variables not associated with any block now.  Those created by
2112 	 the optimizers could be live anywhere in the function.  Those that
2113 	 could possibly have been scoped originally and detached from their
2114 	 block will have their allocation deferred so we coalesce them with
2115 	 others when optimization is enabled.  */
2116       else if (TREE_USED (var))
2117 	expand_now = true;
2118 
2119       /* Finally, mark all variables on the list as used.  We'll use
2120 	 this in a moment when we expand those associated with scopes.  */
2121       TREE_USED (var) = 1;
2122 
2123       if (expand_now)
2124 	expand_one_var (var, true, true);
2125 
2126     next:
2127       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2128 	{
2129 	  rtx rtl = DECL_RTL_IF_SET (var);
2130 
2131 	  /* Keep artificial non-ignored vars in cfun->local_decls
2132 	     chain until instantiate_decls.  */
2133 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2134 	    add_local_decl (cfun, var);
2135 	  else if (rtl == NULL_RTX)
2136 	    /* If rtl isn't set yet, which can happen e.g. with
2137 	       -fstack-protector, retry before returning from this
2138 	       function.  */
2139 	    maybe_local_decls.safe_push (var);
2140 	}
2141     }
2142 
2143   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2144 
2145      +-----------------+-----------------+
2146      | ...processed... | ...duplicates...|
2147      +-----------------+-----------------+
2148                        ^
2149 		       +-- LEN points here.
2150 
2151      We just want the duplicates, as those are the artificial
2152      non-ignored vars that we want to keep until instantiate_decls.
2153      Move them down and truncate the array.  */
2154   if (!vec_safe_is_empty (cfun->local_decls))
2155     cfun->local_decls->block_remove (0, len);
2156 
2157   /* At this point, all variables within the block tree with TREE_USED
2158      set are actually used by the optimized function.  Lay them out.  */
2159   expand_used_vars_for_block (outer_block, true);
2160 
2161   if (stack_vars_num > 0)
2162     {
2163       add_scope_conflicts ();
2164 
2165       /* If stack protection is enabled, we don't share space between
2166 	 vulnerable data and non-vulnerable data.  */
2167       if (flag_stack_protect != 0
2168 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2169 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2170 		  && lookup_attribute ("stack_protect",
2171 				       DECL_ATTRIBUTES (current_function_decl)))))
2172 	add_stack_protection_conflicts ();
2173 
2174       /* Now that we have collected all stack variables, and have computed a
2175 	 minimal interference graph, attempt to save some stack space.  */
2176       partition_stack_vars ();
2177       if (dump_file)
2178 	dump_stack_var_partition ();
2179     }
2180 
2181   switch (flag_stack_protect)
2182     {
2183     case SPCT_FLAG_ALL:
2184       create_stack_guard ();
2185       break;
2186 
2187     case SPCT_FLAG_STRONG:
2188       if (gen_stack_protect_signal
2189 	  || cfun->calls_alloca || has_protected_decls
2190 	  || lookup_attribute ("stack_protect",
2191 			       DECL_ATTRIBUTES (current_function_decl)))
2192 	create_stack_guard ();
2193       break;
2194 
2195     case SPCT_FLAG_DEFAULT:
2196       if (cfun->calls_alloca || has_protected_decls
2197 	  || lookup_attribute ("stack_protect",
2198 			       DECL_ATTRIBUTES (current_function_decl)))
2199 	create_stack_guard ();
2200       break;
2201 
2202     case SPCT_FLAG_EXPLICIT:
2203       if (lookup_attribute ("stack_protect",
2204 			    DECL_ATTRIBUTES (current_function_decl)))
2205 	create_stack_guard ();
2206       break;
2207     default:
2208       ;
2209     }
2210 
2211   /* Assign rtl to each variable based on these partitions.  */
2212   if (stack_vars_num > 0)
2213     {
2214       struct stack_vars_data data;
2215 
2216       data.asan_base = NULL_RTX;
2217       data.asan_alignb = 0;
2218 
2219       /* Reorder decls to be protected by iterating over the variables
2220 	 array multiple times, and allocating out of each phase in turn.  */
2221       /* ??? We could probably integrate this into the qsort we did
2222 	 earlier, such that we naturally see these variables first,
2223 	 and thus naturally allocate things in the right order.  */
2224       if (has_protected_decls)
2225 	{
2226 	  /* Phase 1 contains only character arrays.  */
2227 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2228 
2229 	  /* Phase 2 contains other kinds of arrays.  */
2230 	  if (flag_stack_protect == SPCT_FLAG_ALL
2231 	      || flag_stack_protect == SPCT_FLAG_STRONG
2232 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2233 		  && lookup_attribute ("stack_protect",
2234 				       DECL_ATTRIBUTES (current_function_decl))))
2235 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2236 	}
2237 
2238       if (asan_sanitize_stack_p ())
2239 	/* Phase 3, any partitions that need asan protection
2240 	   in addition to phase 1 and 2.  */
2241 	expand_stack_vars (asan_decl_phase_3, &data);
2242 
2243       /* ASAN description strings don't yet have a syntax for expressing
2244 	 polynomial offsets.  */
2245       HOST_WIDE_INT prev_offset;
2246       if (!data.asan_vec.is_empty ()
2247 	  && frame_offset.is_constant (&prev_offset))
2248 	{
2249 	  HOST_WIDE_INT offset, sz, redzonesz;
2250 	  redzonesz = ASAN_RED_ZONE_SIZE;
2251 	  sz = data.asan_vec[0] - prev_offset;
2252 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2253 	      && data.asan_alignb <= 4096
2254 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2255 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2256 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
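	  /* For example, assuming ASAN_RED_ZONE_SIZE is 32: with
	     data.asan_alignb == 64 and sz == 40 the computation above gives
	     redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88, so the
	     variable plus its red zone span 128 bytes, a multiple of the
	     required alignment.  */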
2257 	  /* Allocating a constant amount of space from a constant
2258 	     starting offset must give a constant result.  */
2259 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2260 		    .to_constant ());
2261 	  data.asan_vec.safe_push (prev_offset);
2262 	  data.asan_vec.safe_push (offset);
2263 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2264 	  if (STRICT_ALIGNMENT)
2265 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2266 				      << ASAN_SHADOW_SHIFT)
2267 				     / BITS_PER_UNIT, 1);
2268 
2269 	  var_end_seq
2270 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2271 					  data.asan_base,
2272 					  data.asan_alignb,
2273 					  data.asan_vec.address (),
2274 					  data.asan_decl_vec.address (),
2275 					  data.asan_vec.length ());
2276 	}
2277 
2278       expand_stack_vars (NULL, &data);
2279     }
2280 
2281   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2282     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2283 					      virtual_stack_vars_rtx,
2284 					      var_end_seq);
2285 
2286   fini_vars_expansion ();
2287 
2288   /* If there were any artificial non-ignored vars without rtl
2289      found earlier, see if deferred stack allocation hasn't assigned
2290      rtl to them.  */
2291   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2292     {
2293       rtx rtl = DECL_RTL_IF_SET (var);
2294 
2295       /* Keep artificial non-ignored vars in cfun->local_decls
2296 	 chain until instantiate_decls.  */
2297       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2298 	add_local_decl (cfun, var);
2299     }
2300 
2301   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2302   if (STACK_ALIGNMENT_NEEDED)
2303     {
2304       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2305       if (FRAME_GROWS_DOWNWARD)
2306 	frame_offset = aligned_lower_bound (frame_offset, align);
2307       else
2308 	frame_offset = aligned_upper_bound (frame_offset, align);
2309     }
2310 
2311   return var_end_seq;
2312 }
2313 
2314 
2315 /* If we need to produce a detailed dump, print the tree representation
2316    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2317    generated for STMT should have been appended.  */
2318 
2319 static void
2320 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2321 {
2322   if (dump_file && (dump_flags & TDF_DETAILS))
2323     {
2324       fprintf (dump_file, "\n;; ");
2325       print_gimple_stmt (dump_file, stmt, 0,
2326 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2327       fprintf (dump_file, "\n");
2328 
2329       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2330     }
2331 }
2332 
2333 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2334 
2335 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2336 
2337 /* Returns the label_rtx expression for a label starting basic block BB.  */
2338 
2339 static rtx_code_label *
2340 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2341 {
2342   gimple_stmt_iterator gsi;
2343   tree lab;
2344 
2345   if (bb->flags & BB_RTL)
2346     return block_label (bb);
2347 
2348   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2349   if (elt)
2350     return *elt;
2351 
2352   /* Find the tree label if it is present.  */
2353 
2354   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2355     {
2356       glabel *lab_stmt;
2357 
2358       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2359       if (!lab_stmt)
2360 	break;
2361 
2362       lab = gimple_label_label (lab_stmt);
2363       if (DECL_NONLOCAL (lab))
2364 	break;
2365 
2366       return jump_target_rtx (lab);
2367     }
2368 
2369   rtx_code_label *l = gen_label_rtx ();
2370   lab_rtx_for_bb->put (bb, l);
2371   return l;
2372 }
2373 
2374 
2375 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2376    of a basic block where we just expanded the conditional at the end,
2377    possibly clean up the CFG and instruction sequence.  LAST is the
2378    last instruction before the just emitted jump sequence.  */
2379 
2380 static void
2381 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2382 {
2383   /* Special case: when jumpif decides that the condition is
2384      trivial it emits an unconditional jump (and the necessary
2385      barrier).  But we still have two edges, the fallthru one is
2386      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2387      we have to insert insns (and split edges) before
2388      find_many_sub_basic_blocks and hence before purge_dead_edges.
2389      But splitting edges might create new blocks which depend on the
2390      fact that if there are two edges there's no barrier.  So the
2391      barrier would get lost and verify_flow_info would ICE.  Instead
2392      of auditing all edge splitters to care for the barrier (which
2393      normally isn't there in a cleaned CFG), fix it here.  */
2394   if (BARRIER_P (get_last_insn ()))
2395     {
2396       rtx_insn *insn;
2397       remove_edge (e);
2398       /* Now we have a single successor block.  If we have insns to
2399 	 insert on the remaining edge, we will potentially insert
2400 	 them at the end of this block (if the dest block isn't feasible)
2401 	 in order to avoid splitting the edge.  This insertion will take
2402 	 place in front of the last jump.  But we might have emitted
2403 	 multiple jumps (conditional and one unconditional) to the
2404 	 same destination.  Inserting in front of the last one then
2405 	 is a problem.  See PR 40021.  We fix this by deleting all
2406 	 jumps except the last unconditional one.  */
2407       insn = PREV_INSN (get_last_insn ());
2408       /* Make sure we have an unconditional jump.  Otherwise we're
2409 	 confused.  */
2410       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2411       for (insn = PREV_INSN (insn); insn != last;)
2412 	{
2413 	  insn = PREV_INSN (insn);
2414 	  if (JUMP_P (NEXT_INSN (insn)))
2415 	    {
2416 	      if (!any_condjump_p (NEXT_INSN (insn)))
2417 		{
2418 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2419 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2420 		}
2421 	      delete_insn (NEXT_INSN (insn));
2422 	    }
2423 	}
2424     }
2425 }
2426 
2427 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2428    Returns a new basic block if we've terminated the current basic
2429    block and created a new one.  */
2430 
2431 static basic_block
2432 expand_gimple_cond (basic_block bb, gcond *stmt)
2433 {
2434   basic_block new_bb, dest;
2435   edge true_edge;
2436   edge false_edge;
2437   rtx_insn *last2, *last;
2438   enum tree_code code;
2439   tree op0, op1;
2440 
2441   code = gimple_cond_code (stmt);
2442   op0 = gimple_cond_lhs (stmt);
2443   op1 = gimple_cond_rhs (stmt);
2444   /* We're sometimes presented with such code:
2445        D.123_1 = x < y;
2446        if (D.123_1 != 0)
2447          ...
2448      This would expand to two comparisons which then later might
2449      be cleaned up by combine.  But some pattern matchers like if-conversion
2450      work better when there's only one compare, so make up for this
2451      here as special exception if TER would have made the same change.  */
2452   if (SA.values
2453       && TREE_CODE (op0) == SSA_NAME
2454       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2455       && TREE_CODE (op1) == INTEGER_CST
2456       && ((gimple_cond_code (stmt) == NE_EXPR
2457 	   && integer_zerop (op1))
2458 	  || (gimple_cond_code (stmt) == EQ_EXPR
2459 	      && integer_onep (op1)))
2460       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2461     {
2462       gimple *second = SSA_NAME_DEF_STMT (op0);
2463       if (gimple_code (second) == GIMPLE_ASSIGN)
2464 	{
2465 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2466 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2467 	    {
2468 	      code = code2;
2469 	      op0 = gimple_assign_rhs1 (second);
2470 	      op1 = gimple_assign_rhs2 (second);
2471 	    }
2472 	  /* If jumps are cheap and the target does not support conditional
2473 	     compare, turn some more codes into jumpy sequences.  */
2474 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2475 		   && targetm.gen_ccmp_first == NULL)
2476 	    {
2477 	      if ((code2 == BIT_AND_EXPR
2478 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2479 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2480 		  || code2 == TRUTH_AND_EXPR)
2481 		{
2482 		  code = TRUTH_ANDIF_EXPR;
2483 		  op0 = gimple_assign_rhs1 (second);
2484 		  op1 = gimple_assign_rhs2 (second);
2485 		}
2486 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2487 		{
2488 		  code = TRUTH_ORIF_EXPR;
2489 		  op0 = gimple_assign_rhs1 (second);
2490 		  op1 = gimple_assign_rhs2 (second);
2491 		}
2492 	    }
2493 	}
2494     }
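
  /* For illustration, the BIT_AND_EXPR case above means that GIMPLE like

	 _1 = a < b;
	 _2 = c < d;
	 _3 = _1 & _2;
	 if (_3 != 0) goto L1; else goto L2;

     may be expanded as a TRUTH_ANDIF_EXPR, i.e. as two short-circuit
     conditional jumps, when branches are cheap and the target provides no
     conditional-compare support.  */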
2495 
2496   last2 = last = get_last_insn ();
2497 
2498   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2499   set_curr_insn_location (gimple_location (stmt));
2500 
2501   /* These flags have no purpose in RTL land.  */
2502   true_edge->flags &= ~EDGE_TRUE_VALUE;
2503   false_edge->flags &= ~EDGE_FALSE_VALUE;
2504 
2505   /* We can either have a pure conditional jump with one fallthru edge or
2506      a two-way jump that needs to be decomposed into two basic blocks.  */
2507   if (false_edge->dest == bb->next_bb)
2508     {
2509       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2510 		true_edge->probability);
2511       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2512       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2513 	set_curr_insn_location (true_edge->goto_locus);
2514       false_edge->flags |= EDGE_FALLTHRU;
2515       maybe_cleanup_end_of_block (false_edge, last);
2516       return NULL;
2517     }
2518   if (true_edge->dest == bb->next_bb)
2519     {
2520       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2521 		   false_edge->probability);
2522       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2523       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2524 	set_curr_insn_location (false_edge->goto_locus);
2525       true_edge->flags |= EDGE_FALLTHRU;
2526       maybe_cleanup_end_of_block (true_edge, last);
2527       return NULL;
2528     }
2529 
2530   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 	    true_edge->probability);
2532   last = get_last_insn ();
2533   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2534     set_curr_insn_location (false_edge->goto_locus);
2535   emit_jump (label_rtx_for_bb (false_edge->dest));
2536 
2537   BB_END (bb) = last;
2538   if (BARRIER_P (BB_END (bb)))
2539     BB_END (bb) = PREV_INSN (BB_END (bb));
2540   update_bb_for_insn (bb);
2541 
2542   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2543   dest = false_edge->dest;
2544   redirect_edge_succ (false_edge, new_bb);
2545   false_edge->flags |= EDGE_FALLTHRU;
2546   new_bb->count = false_edge->count ();
2547   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2548   add_bb_to_loop (new_bb, loop);
2549   if (loop->latch == bb
2550       && loop->header == dest)
2551     loop->latch = new_bb;
2552   make_single_succ_edge (new_bb, dest, 0);
2553   if (BARRIER_P (BB_END (new_bb)))
2554     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2555   update_bb_for_insn (new_bb);
2556 
2557   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2558 
2559   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2560     {
2561       set_curr_insn_location (true_edge->goto_locus);
2562       true_edge->goto_locus = curr_insn_location ();
2563     }
2564 
2565   return new_bb;
2566 }
2567 
2568 /* Mark all calls that can have a transaction restart.  */
2569 
2570 static void
2571 mark_transaction_restart_calls (gimple *stmt)
2572 {
2573   struct tm_restart_node dummy;
2574   tm_restart_node **slot;
2575 
2576   if (!cfun->gimple_df->tm_restart)
2577     return;
2578 
2579   dummy.stmt = stmt;
2580   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2581   if (slot)
2582     {
2583       struct tm_restart_node *n = *slot;
2584       tree list = n->label_or_list;
2585       rtx_insn *insn;
2586 
2587       for (insn = next_real_insn (get_last_insn ());
2588 	   !CALL_P (insn);
2589 	   insn = next_real_insn (insn))
2590 	continue;
2591 
2592       if (TREE_CODE (list) == LABEL_DECL)
2593 	add_reg_note (insn, REG_TM, label_rtx (list));
2594       else
2595 	for (; list ; list = TREE_CHAIN (list))
2596 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2597     }
2598 }
2599 
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601    statement STMT.  */
2602 
2603 static void
2604 expand_call_stmt (gcall *stmt)
2605 {
2606   tree exp, decl, lhs;
2607   bool builtin_p;
2608   size_t i;
2609 
2610   if (gimple_call_internal_p (stmt))
2611     {
2612       expand_internal_call (stmt);
2613       return;
2614     }
2615 
2616   /* If this is a call to a built-in function and it has no effect other
2617      than setting the lhs, try to implement it using an internal function
2618      instead.  */
2619   decl = gimple_call_fndecl (stmt);
2620   if (gimple_call_lhs (stmt)
2621       && !gimple_has_side_effects (stmt)
2622       && (optimize || (decl && called_as_built_in (decl))))
2623     {
2624       internal_fn ifn = replacement_internal_fn (stmt);
2625       if (ifn != IFN_LAST)
2626 	{
2627 	  expand_internal_call (ifn, stmt);
2628 	  return;
2629 	}
2630     }
2631 
2632   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2633 
2634   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2635   builtin_p = decl && DECL_BUILT_IN (decl);
2636 
2637   /* If this is not a builtin function, the function type through which the
2638      call is made may be different from the type of the function.  */
2639   if (!builtin_p)
2640     CALL_EXPR_FN (exp)
2641       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2642 		      CALL_EXPR_FN (exp));
2643 
2644   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2645   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2646 
2647   for (i = 0; i < gimple_call_num_args (stmt); i++)
2648     {
2649       tree arg = gimple_call_arg (stmt, i);
2650       gimple *def;
2651       /* TER forwards ADDR_EXPRs into arguments of builtin functions so we
2652 	 have a chance to infer more accurate alignment information.  See PR39954.  */
2653       if (builtin_p
2654 	  && TREE_CODE (arg) == SSA_NAME
2655 	  && (def = get_gimple_for_ssa_name (arg))
2656 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2657 	arg = gimple_assign_rhs1 (def);
2658       CALL_EXPR_ARG (exp, i) = arg;
2659     }
2660 
2661   if (gimple_has_side_effects (stmt))
2662     TREE_SIDE_EFFECTS (exp) = 1;
2663 
2664   if (gimple_call_nothrow_p (stmt))
2665     TREE_NOTHROW (exp) = 1;
2666 
2667   if (gimple_no_warning_p (stmt))
2668     TREE_NO_WARNING (exp) = 1;
2669 
2670   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2671   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2672   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2673   if (decl
2674       && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2675       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2676     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2677   else
2678     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2679   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2680   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2681   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2682   CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2683 
2684   /* Ensure RTL is created for debug args.  */
2685   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2686     {
2687       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2688       unsigned int ix;
2689       tree dtemp;
2690 
2691       if (debug_args)
2692 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2693 	  {
2694 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2695 	    expand_debug_expr (dtemp);
2696 	  }
2697     }
2698 
2699   rtx_insn *before_call = get_last_insn ();
2700   lhs = gimple_call_lhs (stmt);
2701   if (lhs)
2702     expand_assignment (lhs, exp, false);
2703   else
2704     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2705 
2706   /* If the gimple call is an indirect call and has 'nocf_check'
2707      attribute find a generated CALL insn to mark it as no
2708      control-flow verification is needed.  */
2709   if (gimple_call_nocf_check_p (stmt)
2710       && !gimple_call_fndecl (stmt))
2711     {
2712       rtx_insn *last = get_last_insn ();
2713       while (!CALL_P (last)
2714 	     && last != before_call)
2715 	last = PREV_INSN (last);
2716 
2717       if (last != before_call)
2718 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2719     }
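
  /* For illustration (hypothetical source): an indirect call through
     "void (*fp) (void) __attribute__ ((nocf_check)); fp ();" has no
     fndecl, so the walk above finds the CALL insn just emitted and
     attaches a REG_CALL_NOCF_CHECK note marking it as not needing
     control-flow verification.  */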
2720 
2721   mark_transaction_restart_calls (stmt);
2722 }
2723 
2724 
2725 /* Generate RTL for an asm statement (explicit assembler code).
2726    STRING is a STRING_CST node containing the assembler code text,
2727    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2728    insn is volatile; don't optimize it.  */
2729 
2730 static void
2731 expand_asm_loc (tree string, int vol, location_t locus)
2732 {
2733   rtx body;
2734 
2735   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2736 				ggc_strdup (TREE_STRING_POINTER (string)),
2737 				locus);
2738 
2739   MEM_VOLATILE_P (body) = vol;
2740 
2741   /* Non-empty basic ASM implicitly clobbers memory.  */
2742   if (TREE_STRING_LENGTH (string) != 0)
2743     {
2744       rtx asm_op, clob;
2745       unsigned i, nclobbers;
2746       auto_vec<rtx> input_rvec, output_rvec;
2747       auto_vec<const char *> constraints;
2748       auto_vec<rtx> clobber_rvec;
2749       HARD_REG_SET clobbered_regs;
2750       CLEAR_HARD_REG_SET (clobbered_regs);
2751 
2752       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2753       clobber_rvec.safe_push (clob);
2754 
2755       if (targetm.md_asm_adjust)
2756 	targetm.md_asm_adjust (output_rvec, input_rvec,
2757 			       constraints, clobber_rvec,
2758 			       clobbered_regs);
2759 
2760       asm_op = body;
2761       nclobbers = clobber_rvec.length ();
2762       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2763 
2764       XVECEXP (body, 0, 0) = asm_op;
2765       for (i = 0; i < nclobbers; i++)
2766 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2767     }
2768 
2769   emit_insn (body);
2770 }
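
/* For example, a basic asm such as 'asm volatile ("nop");' reaches this
   point as a bare ASM_INPUT; because its string is non-empty it is wrapped
   in a PARALLEL together with a (clobber (mem:BLK (scratch))) plus any
   extra clobbers requested by targetm.md_asm_adjust.  */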
2771 
2772 /* Return the number of times character C occurs in string S.  */
2773 static int
2774 n_occurrences (int c, const char *s)
2775 {
2776   int n = 0;
2777   while (*s)
2778     n += (*s++ == c);
2779   return n;
2780 }
2781 
2782 /* A subroutine of expand_asm_operands.  Check that all operands have
2783    the same number of alternatives.  Return true if so.  */
2784 
2785 static bool
2786 check_operand_nalternatives (const vec<const char *> &constraints)
2787 {
2788   unsigned len = constraints.length();
2789   if (len > 0)
2790     {
2791       int nalternatives = n_occurrences (',', constraints[0]);
2792 
2793       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2794 	{
2795 	  error ("too many alternatives in %<asm%>");
2796 	  return false;
2797 	}
2798 
2799       for (unsigned i = 1; i < len; ++i)
2800 	if (n_occurrences (',', constraints[i]) != nalternatives)
2801 	  {
2802 	    error ("operand constraints for %<asm%> differ "
2803 		   "in number of alternatives");
2804 	    return false;
2805 	  }
2806     }
2807   return true;
2808 }
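
/* For example, the constraint strings { "=r,m", "r,r" } both describe two
   alternatives and are accepted, whereas { "=r,m", "r" } triggers the
   "differ in number of alternatives" error above.  */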
2809 
2810 /* Check for overlap between registers marked in CLOBBERED_REGS and
2811    anything inappropriate in T.  Emit an error and return true if a
2812    conflict is found, false if everything is OK.  */
2813 
2814 static bool
2815 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2816 {
2817   /* Conflicts between asm-declared register variables and the clobber
2818      list are not allowed.  */
2819   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2820 
2821   if (overlap)
2822     {
2823       error ("asm-specifier for variable %qE conflicts with asm clobber list",
2824 	     DECL_NAME (overlap));
2825 
2826       /* Reset registerness to stop multiple errors emitted for a single
2827 	 variable.  */
2828       DECL_REGISTER (overlap) = 0;
2829       return true;
2830     }
2831 
2832   return false;
2833 }
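
/* For illustration (hypothetical source, x86-style register name):

     register int x asm ("ebx");
     asm ("..." : : "r" (x) : "ebx");

   conflicts because the input lives in a register that the asm also
   clobbers, so the error above is emitted and DECL_REGISTER is cleared
   to avoid duplicate diagnostics.  */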
2834 
2835 /* Generate RTL for an asm statement with arguments.
2836    STRING is the instruction template.
2837    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2838    Each output or input has an expression in the TREE_VALUE and
2839    a tree list in TREE_PURPOSE which in turn contains a constraint
2840    name in TREE_VALUE (or NULL_TREE) and a constraint string
2841    in TREE_PURPOSE.
2842    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2843    that is clobbered by this insn.
2844 
2845    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2846    should be the fallthru basic block of the asm goto.
2847 
2848    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2849    Some elements of OUTPUTS may be replaced with trees representing temporary
2850    values.  The caller should copy those temporary values to the originally
2851    specified lvalues.
2852 
2853    VOL nonzero means the insn is volatile; don't optimize it.  */
2854 
2855 static void
2856 expand_asm_stmt (gasm *stmt)
2857 {
2858   class save_input_location
2859   {
2860     location_t old;
2861 
2862   public:
2863     explicit save_input_location(location_t where)
2864     {
2865       old = input_location;
2866       input_location = where;
2867     }
2868 
2869     ~save_input_location()
2870     {
2871       input_location = old;
2872     }
2873   };
2874 
2875   location_t locus = gimple_location (stmt);
2876 
2877   if (gimple_asm_input_p (stmt))
2878     {
2879       const char *s = gimple_asm_string (stmt);
2880       tree string = build_string (strlen (s), s);
2881       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2882       return;
2883     }
2884 
2885   /* There are some legacy diagnostics in here; this also avoids a
2886      sixth parameter to targetm.md_asm_adjust.  */
2887   save_input_location s_i_l(locus);
2888 
2889   unsigned noutputs = gimple_asm_noutputs (stmt);
2890   unsigned ninputs = gimple_asm_ninputs (stmt);
2891   unsigned nlabels = gimple_asm_nlabels (stmt);
2892   unsigned i;
2893 
2894   /* ??? Diagnose during gimplification?  */
2895   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2896     {
2897       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2898       return;
2899     }
2900 
2901   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2902   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2903   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2904 
2905   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2906 
2907   output_tvec.safe_grow (noutputs);
2908   input_tvec.safe_grow (ninputs);
2909   constraints.safe_grow (noutputs + ninputs);
2910 
2911   for (i = 0; i < noutputs; ++i)
2912     {
2913       tree t = gimple_asm_output_op (stmt, i);
2914       output_tvec[i] = TREE_VALUE (t);
2915       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2916     }
2917   for (i = 0; i < ninputs; i++)
2918     {
2919       tree t = gimple_asm_input_op (stmt, i);
2920       input_tvec[i] = TREE_VALUE (t);
2921       constraints[i + noutputs]
2922 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2923     }
2924 
2925   /* ??? Diagnose during gimplification?  */
2926   if (! check_operand_nalternatives (constraints))
2927     return;
2928 
2929   /* Count the number of meaningful clobbered registers, ignoring what
2930      we would ignore later.  */
2931   auto_vec<rtx> clobber_rvec;
2932   HARD_REG_SET clobbered_regs;
2933   CLEAR_HARD_REG_SET (clobbered_regs);
2934 
2935   if (unsigned n = gimple_asm_nclobbers (stmt))
2936     {
2937       clobber_rvec.reserve (n);
2938       for (i = 0; i < n; i++)
2939 	{
2940 	  tree t = gimple_asm_clobber_op (stmt, i);
2941           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2942 	  int nregs, j;
2943 
2944 	  j = decode_reg_name_and_count (regname, &nregs);
2945 	  if (j < 0)
2946 	    {
2947 	      if (j == -2)
2948 		{
2949 		  /* ??? Diagnose during gimplification?  */
2950 		  error ("unknown register name %qs in %<asm%>", regname);
2951 		}
2952 	      else if (j == -4)
2953 		{
2954 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2955 		  clobber_rvec.safe_push (x);
2956 		}
2957 	      else
2958 		{
2959 		  /* Otherwise we should have -1 == empty string
2960 		     or -3 == cc, which is not a register.  */
2961 		  gcc_assert (j == -1 || j == -3);
2962 		}
2963 	    }
2964 	  else
2965 	    for (int reg = j; reg < j + nregs; reg++)
2966 	      {
2967 		/* Clobbering the PIC register is an error.  */
2968 		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2969 		  {
2970 		    /* ??? Diagnose during gimplification?  */
2971 		    error ("PIC register clobbered by %qs in %<asm%>",
2972 			   regname);
2973 		    return;
2974 		  }
2975 		/* Clobbering the stack pointer register.  */
2976 		else if (reg == (int) STACK_POINTER_REGNUM)
2977 		  crtl->sp_is_clobbered_by_asm = true;
2978 
2979 	        SET_HARD_REG_BIT (clobbered_regs, reg);
2980 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2981 		clobber_rvec.safe_push (x);
2982 	      }
2983 	}
2984     }
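
  /* For illustration: a "memory" clobber comes back from
     decode_reg_name_and_count as -4 and is represented here by the
     (mem:BLK (scratch)) pushed above, rather than by any hard register
     bit in CLOBBERED_REGS.  */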
2985   unsigned nclobbers = clobber_rvec.length();
2986 
2987   /* First pass over inputs and outputs checks validity and sets
2988      mark_addressable if needed.  */
2989   /* ??? Diagnose during gimplification?  */
2990 
2991   for (i = 0; i < noutputs; ++i)
2992     {
2993       tree val = output_tvec[i];
2994       tree type = TREE_TYPE (val);
2995       const char *constraint;
2996       bool is_inout;
2997       bool allows_reg;
2998       bool allows_mem;
2999 
3000       /* Try to parse the output constraint.  If that fails, there's
3001 	 no point in going further.  */
3002       constraint = constraints[i];
3003       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3004 				    &allows_mem, &allows_reg, &is_inout))
3005 	return;
3006 
3007       if (! allows_reg
3008 	  && (allows_mem
3009 	      || is_inout
3010 	      || (DECL_P (val)
3011 		  && REG_P (DECL_RTL (val))
3012 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3013 	mark_addressable (val);
3014     }
3015 
3016   for (i = 0; i < ninputs; ++i)
3017     {
3018       bool allows_reg, allows_mem;
3019       const char *constraint;
3020 
3021       constraint = constraints[i + noutputs];
3022       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3023 				    constraints.address (),
3024 				    &allows_mem, &allows_reg))
3025 	return;
3026 
3027       if (! allows_reg && allows_mem)
3028 	mark_addressable (input_tvec[i]);
3029     }
3030 
3031   /* Second pass evaluates arguments.  */
3032 
3033   /* Make sure stack is consistent for asm goto.  */
3034   if (nlabels > 0)
3035     do_pending_stack_adjust ();
3036   int old_generating_concat_p = generating_concat_p;
3037 
3038   /* Vector of RTX's of evaluated output operands.  */
3039   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3040   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3041   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3042 
3043   output_rvec.safe_grow (noutputs);
3044 
3045   for (i = 0; i < noutputs; ++i)
3046     {
3047       tree val = output_tvec[i];
3048       tree type = TREE_TYPE (val);
3049       bool is_inout, allows_reg, allows_mem, ok;
3050       rtx op;
3051 
3052       ok = parse_output_constraint (&constraints[i], i, ninputs,
3053 				    noutputs, &allows_mem, &allows_reg,
3054 				    &is_inout);
3055       gcc_assert (ok);
3056 
3057       /* If an output operand is not a decl or indirect ref and our constraint
3058 	 allows a register, make a temporary to act as an intermediate.
3059 	 Make the asm insn write into that, then we will copy it to
3060 	 the real output operand.  Likewise for promoted variables.  */
3061 
3062       generating_concat_p = 0;
3063 
3064       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3065 	  || (DECL_P (val)
3066 	      && (allows_mem || REG_P (DECL_RTL (val)))
3067 	      && ! (REG_P (DECL_RTL (val))
3068 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3069 	  || ! allows_reg
3070 	  || is_inout
3071 	  || TREE_ADDRESSABLE (type))
3072 	{
3073 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3074 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3075 	  if (MEM_P (op))
3076 	    op = validize_mem (op);
3077 
3078 	  if (! allows_reg && !MEM_P (op))
3079 	    error ("output number %d not directly addressable", i);
3080 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3081 	      || GET_CODE (op) == CONCAT)
3082 	    {
3083 	      rtx old_op = op;
3084 	      op = gen_reg_rtx (GET_MODE (op));
3085 
3086 	      generating_concat_p = old_generating_concat_p;
3087 
3088 	      if (is_inout)
3089 		emit_move_insn (op, old_op);
3090 
3091 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3092 	      emit_move_insn (old_op, op);
3093 	      after_rtl_seq = get_insns ();
3094 	      after_rtl_end = get_last_insn ();
3095 	      end_sequence ();
3096 	    }
3097 	}
3098       else
3099 	{
3100 	  op = assign_temp (type, 0, 1);
3101 	  op = validize_mem (op);
3102 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3103 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3104 
3105 	  generating_concat_p = old_generating_concat_p;
3106 
3107 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3108 	  expand_assignment (val, make_tree (type, op), false);
3109 	  after_rtl_seq = get_insns ();
3110 	  after_rtl_end = get_last_insn ();
3111 	  end_sequence ();
3112 	}
3113       output_rvec[i] = op;
3114 
3115       if (is_inout)
3116 	inout_opnum.safe_push (i);
3117     }
3118 
3119   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3120   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3121 
3122   input_rvec.safe_grow (ninputs);
3123   input_mode.safe_grow (ninputs);
3124 
3125   generating_concat_p = 0;
3126 
3127   for (i = 0; i < ninputs; ++i)
3128     {
3129       tree val = input_tvec[i];
3130       tree type = TREE_TYPE (val);
3131       bool allows_reg, allows_mem, ok;
3132       const char *constraint;
3133       rtx op;
3134 
3135       constraint = constraints[i + noutputs];
3136       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3137 				   constraints.address (),
3138 				   &allows_mem, &allows_reg);
3139       gcc_assert (ok);
3140 
3141       /* EXPAND_INITIALIZER will not generate code for valid initializer
3142 	 constants, but will still generate code for other types of operand.
3143 	 This is the behavior we want for constant constraints.  */
3144       op = expand_expr (val, NULL_RTX, VOIDmode,
3145 			allows_reg ? EXPAND_NORMAL
3146 			: allows_mem ? EXPAND_MEMORY
3147 			: EXPAND_INITIALIZER);
3148 
3149       /* Never pass a CONCAT to an ASM.  */
3150       if (GET_CODE (op) == CONCAT)
3151 	op = force_reg (GET_MODE (op), op);
3152       else if (MEM_P (op))
3153 	op = validize_mem (op);
3154 
3155       if (asm_operand_ok (op, constraint, NULL) <= 0)
3156 	{
3157 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3158 	    op = force_reg (TYPE_MODE (type), op);
3159 	  else if (!allows_mem)
3160 	    warning (0, "asm operand %d probably doesn%'t match constraints",
3161 		     i + noutputs);
3162 	  else if (MEM_P (op))
3163 	    {
3164 	      /* We won't recognize either volatile memory or memory
3165 		 with a queued address as a valid memory_operand
3166 		 at this point.  Ignore it: clearly this *is* memory.  */
3167 	    }
3168 	  else
3169 	    gcc_unreachable ();
3170 	}
3171       input_rvec[i] = op;
3172       input_mode[i] = TYPE_MODE (type);
3173     }
3174 
3175   /* For in-out operands, copy output rtx to input rtx.  */
3176   unsigned ninout = inout_opnum.length();
3177   for (i = 0; i < ninout; i++)
3178     {
3179       int j = inout_opnum[i];
3180       rtx o = output_rvec[j];
3181 
3182       input_rvec.safe_push (o);
3183       input_mode.safe_push (GET_MODE (o));
3184 
3185       char buffer[16];
3186       sprintf (buffer, "%d", j);
3187       constraints.safe_push (ggc_strdup (buffer));
3188     }
3189   ninputs += ninout;
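
  /* The loop above makes each in-out operand J visible to the
     matching-constraint machinery by pushing its output rtx again as an
     input whose constraint is simply the digit string "J", e.g. "0" for
     the first operand.  */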
3190 
3191   /* Sometimes we wish to automatically clobber registers across an asm.
3192      Case in point is when the i386 backend moved from cc0 to a hard reg --
3193      maintaining source-level compatibility means automatically clobbering
3194      the flags register.  */
3195   rtx_insn *after_md_seq = NULL;
3196   if (targetm.md_asm_adjust)
3197     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3198 					  constraints, clobber_rvec,
3199 					  clobbered_regs);
3200 
3201   /* Do not allow the hook to change the output and input count,
3202      lest it mess up the operand numbering.  */
3203   gcc_assert (output_rvec.length() == noutputs);
3204   gcc_assert (input_rvec.length() == ninputs);
3205   gcc_assert (constraints.length() == noutputs + ninputs);
3206 
3207   /* But it certainly can adjust the clobbers.  */
3208   nclobbers = clobber_rvec.length();
3209 
3210   /* Third pass checks for easy conflicts.  */
3211   /* ??? Why are we doing this on trees instead of rtx?  */
3212 
3213   bool clobber_conflict_found = 0;
3214   for (i = 0; i < noutputs; ++i)
3215     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3216 	clobber_conflict_found = 1;
3217   for (i = 0; i < ninputs - ninout; ++i)
3218     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3219 	clobber_conflict_found = 1;
3220 
3221   /* Make vectors for the expression-rtx, constraint strings,
3222      and named operands.  */
3223 
3224   rtvec argvec = rtvec_alloc (ninputs);
3225   rtvec constraintvec = rtvec_alloc (ninputs);
3226   rtvec labelvec = rtvec_alloc (nlabels);
3227 
3228   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3229 				    : GET_MODE (output_rvec[0])),
3230 				   ggc_strdup (gimple_asm_string (stmt)),
3231 				   "", 0, argvec, constraintvec,
3232 				   labelvec, locus);
3233   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3234 
3235   for (i = 0; i < ninputs; ++i)
3236     {
3237       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3238       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3239 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3240 				 constraints[i + noutputs],
3241 				 locus);
3242     }
3243 
3244   /* Copy labels to the vector.  */
3245   rtx_code_label *fallthru_label = NULL;
3246   if (nlabels > 0)
3247     {
3248       basic_block fallthru_bb = NULL;
3249       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3250       if (fallthru)
3251 	fallthru_bb = fallthru->dest;
3252 
3253       for (i = 0; i < nlabels; ++i)
3254 	{
3255 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3256 	  rtx_insn *r;
3257 	  /* If asm goto has any labels in the fallthru basic block, use
3258 	     a label that we emit immediately after the asm goto.  Expansion
3259 	     may insert further instructions into the same basic block after
3260 	     asm goto and if we don't do this, insertion of instructions on
3261 	     the fallthru edge might misbehave.  See PR58670.  */
3262 	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3263 	    {
3264 	      if (fallthru_label == NULL_RTX)
3265 	        fallthru_label = gen_label_rtx ();
3266 	      r = fallthru_label;
3267 	    }
3268 	  else
3269 	    r = label_rtx (label);
3270 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3271 	}
3272     }
3273 
3274   /* Now, for each output, construct an rtx
3275      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3276 			       ARGVEC CONSTRAINTS OPNAMES))
3277      If there is more than one, put them inside a PARALLEL.  */
3278 
3279   if (nlabels > 0 && nclobbers == 0)
3280     {
3281       gcc_assert (noutputs == 0);
3282       emit_jump_insn (body);
3283     }
3284   else if (noutputs == 0 && nclobbers == 0)
3285     {
3286       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3287       emit_insn (body);
3288     }
3289   else if (noutputs == 1 && nclobbers == 0)
3290     {
3291       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3292       emit_insn (gen_rtx_SET (output_rvec[0], body));
3293     }
3294   else
3295     {
3296       rtx obody = body;
3297       int num = noutputs;
3298 
3299       if (num == 0)
3300 	num = 1;
3301 
3302       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3303 
3304       /* For each output operand, store a SET.  */
3305       for (i = 0; i < noutputs; ++i)
3306 	{
3307 	  rtx src, o = output_rvec[i];
3308 	  if (i == 0)
3309 	    {
3310 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3311 	      src = obody;
3312 	    }
3313 	  else
3314 	    {
3315 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3316 					  ASM_OPERANDS_TEMPLATE (obody),
3317 					  constraints[i], i, argvec,
3318 					  constraintvec, labelvec, locus);
3319 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3320 	    }
3321 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3322 	}
3323 
3324       /* If there are no outputs (but there are some clobbers)
3325 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3326       if (i == 0)
3327 	XVECEXP (body, 0, i++) = obody;
3328 
3329       /* Store (clobber REG) for each clobbered register specified.  */
3330       for (unsigned j = 0; j < nclobbers; ++j)
3331 	{
3332 	  rtx clobbered_reg = clobber_rvec[j];
3333 
3334 	  /* Do a sanity check for any overlap between the clobbers and the
3335 	     inputs and outputs that hasn't been handled.  Such overlap
3336 	     should have been detected and reported above.  */
3337 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3338 	    {
3339 	      /* We test the old body (obody) contents to avoid
3340 		 tripping over the under-construction body.  */
3341 	      for (unsigned k = 0; k < noutputs; ++k)
3342 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3343 		  internal_error ("asm clobber conflict with output operand");
3344 
3345 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3346 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3347 		  internal_error ("asm clobber conflict with input operand");
3348 	    }
3349 
3350 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3351 	}
3352 
3353       if (nlabels > 0)
3354 	emit_jump_insn (body);
3355       else
3356 	emit_insn (body);
3357     }
3358 
3359   generating_concat_p = old_generating_concat_p;
3360 
3361   if (fallthru_label)
3362     emit_label (fallthru_label);
3363 
3364   if (after_md_seq)
3365     emit_insn (after_md_seq);
3366   if (after_rtl_seq)
3367     emit_insn (after_rtl_seq);
3368 
3369   free_temp_slots ();
3370   crtl->has_asm_statement = 1;
3371 }
3372 
3373 /* Emit code to jump to the address
3374    specified by the pointer expression EXP.  */
3375 
3376 static void
3377 expand_computed_goto (tree exp)
3378 {
3379   rtx x = expand_normal (exp);
3380 
3381   do_pending_stack_adjust ();
3382   emit_indirect_jump (x);
3383 }
3384 
3385 /* Generate RTL code for a `goto' statement with target label LABEL.
3386    LABEL should be a LABEL_DECL tree node that was or will later be
3387    defined with `expand_label'.  */
3388 
3389 static void
3390 expand_goto (tree label)
3391 {
3392   if (flag_checking)
3393     {
3394       /* Check for a nonlocal goto to a containing function.  Should have
3395 	 gotten translated to __builtin_nonlocal_goto.  */
3396       tree context = decl_function_context (label);
3397       gcc_assert (!context || context == current_function_decl);
3398     }
3399 
3400   emit_jump (jump_target_rtx (label));
3401 }
3402 
3403 /* Output a return with no value.  */
3404 
3405 static void
3406 expand_null_return_1 (void)
3407 {
3408   clear_pending_stack_adjust ();
3409   do_pending_stack_adjust ();
3410   emit_jump (return_label);
3411 }
3412 
3413 /* Generate RTL to return from the current function, with no value.
3414    (That is, we do not do anything about returning any value.)  */
3415 
3416 void
3417 expand_null_return (void)
3418 {
3419   /* If this function was declared to return a value, but we
3420      didn't, clobber the return registers so that they are not
3421      propagated live to the rest of the function.  */
3422   clobber_return_register ();
3423 
3424   expand_null_return_1 ();
3425 }
3426 
3427 /* Generate RTL to return from the current function, with value VAL.  */
3428 
3429 static void
3430 expand_value_return (rtx val)
3431 {
3432   /* Copy the value to the return location unless it's already there.  */
3433 
3434   tree decl = DECL_RESULT (current_function_decl);
3435   rtx return_reg = DECL_RTL (decl);
3436   if (return_reg != val)
3437     {
3438       tree funtype = TREE_TYPE (current_function_decl);
3439       tree type = TREE_TYPE (decl);
3440       int unsignedp = TYPE_UNSIGNED (type);
3441       machine_mode old_mode = DECL_MODE (decl);
3442       machine_mode mode;
3443       if (DECL_BY_REFERENCE (decl))
3444         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3445       else
3446         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3447 
3448       if (mode != old_mode)
3449 	val = convert_modes (mode, old_mode, val, unsignedp);
3450 
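      /* A PARALLEL return location describes a value split across
	 several registers (e.g. a small aggregate returned in a pair of
	 registers), so each piece must be loaded separately.  */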
3451       if (GET_CODE (return_reg) == PARALLEL)
3452 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3453       else
3454 	emit_move_insn (return_reg, val);
3455     }
3456 
3457   expand_null_return_1 ();
3458 }
3459 
3460 /* Generate RTL to evaluate the expression RETVAL and return it
3461    from the current function.  */
3462 
3463 static void
3464 expand_return (tree retval, tree bounds)
3465 {
3466   rtx result_rtl;
3467   rtx val = 0;
3468   tree retval_rhs;
3469   rtx bounds_rtl;
3470 
3471   /* If function wants no value, give it none.  */
3472   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3473     {
3474       expand_normal (retval);
3475       expand_null_return ();
3476       return;
3477     }
3478 
3479   if (retval == error_mark_node)
3480     {
3481       /* Treat this like a return of no value from a function that
3482 	 returns a value.  */
3483       expand_null_return ();
3484       return;
3485     }
3486   else if ((TREE_CODE (retval) == MODIFY_EXPR
3487 	    || TREE_CODE (retval) == INIT_EXPR)
3488 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3489     retval_rhs = TREE_OPERAND (retval, 1);
3490   else
3491     retval_rhs = retval;
3492 
3493   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3494 
3495   /* Put the returned bounds in the right place.  */
3496   bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3497   if (bounds_rtl)
3498     {
3499       rtx addr = NULL;
3500       rtx bnd = NULL;
3501 
3502       if (bounds && bounds != error_mark_node)
3503 	{
3504 	  bnd = expand_normal (bounds);
3505 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3506 	}
3507       else if (REG_P (bounds_rtl))
3508 	{
3509 	  if (bounds)
3510 	    bnd = chkp_expand_zero_bounds ();
3511 	  else
3512 	    {
3513 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3514 	      addr = gen_rtx_MEM (Pmode, addr);
3515 	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3516 	    }
3517 
3518 	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3519 	}
3520       else
3521 	{
3522 	  int n;
3523 
3524 	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3525 
3526 	  if (bounds)
3527 	    bnd = chkp_expand_zero_bounds ();
3528 	  else
3529 	    {
3530 	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3531 	      addr = gen_rtx_MEM (Pmode, addr);
3532 	    }
3533 
3534 	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3535 	    {
3536 	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3537 	      if (!bounds)
3538 		{
3539 		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3540 		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3541 		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3542 		}
3543 	      targetm.calls.store_returned_bounds (slot, bnd);
3544 	    }
3545 	}
3546     }
3547   else if (chkp_function_instrumented_p (current_function_decl)
3548 	   && !BOUNDED_P (retval_rhs)
3549 	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3550 	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3551     {
3552       rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3553       addr = gen_rtx_MEM (Pmode, addr);
3554 
3555       gcc_assert (MEM_P (result_rtl));
3556 
3557       chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3558     }
3559 
3560   /* If we are returning the RESULT_DECL, then the value has already
3561      been stored into it, so we don't have to do anything special.  */
3562   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3563     expand_value_return (result_rtl);
3564 
3565   /* If the result is an aggregate that is being returned in one (or more)
3566      registers, load the registers here.  */
3567 
3568   else if (retval_rhs != 0
3569 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3570 	   && REG_P (result_rtl))
3571     {
3572       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3573       if (val)
3574 	{
3575 	  /* Use the mode of the result value on the return register.  */
3576 	  PUT_MODE (result_rtl, GET_MODE (val));
3577 	  expand_value_return (val);
3578 	}
3579       else
3580 	expand_null_return ();
3581     }
3582   else if (retval_rhs != 0
3583 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3584 	   && (REG_P (result_rtl)
3585 	       || (GET_CODE (result_rtl) == PARALLEL)))
3586     {
3587       /* Compute the return value into a temporary (usually a pseudo reg).  */
3588       val
3589 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3590       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3591       val = force_not_mem (val);
3592       expand_value_return (val);
3593     }
3594   else
3595     {
3596       /* No hard reg used; calculate value into hard return reg.  */
3597       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3598       expand_value_return (result_rtl);
3599     }
3600 }
3601 
3602 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3603    STMT that doesn't require special handling for outgoing edges.  That
3604    is, no tail calls and no GIMPLE_COND.  */
3605 
3606 static void
3607 expand_gimple_stmt_1 (gimple *stmt)
3608 {
3609   tree op0;
3610 
3611   set_curr_insn_location (gimple_location (stmt));
3612 
3613   switch (gimple_code (stmt))
3614     {
3615     case GIMPLE_GOTO:
3616       op0 = gimple_goto_dest (stmt);
3617       if (TREE_CODE (op0) == LABEL_DECL)
3618 	expand_goto (op0);
3619       else
3620 	expand_computed_goto (op0);
3621       break;
3622     case GIMPLE_LABEL:
3623       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3624       break;
3625     case GIMPLE_NOP:
3626     case GIMPLE_PREDICT:
3627       break;
3628     case GIMPLE_SWITCH:
3629       {
3630 	gswitch *swtch = as_a <gswitch *> (stmt);
3631 	if (gimple_switch_num_labels (swtch) == 1)
3632 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3633 	else
3634 	  expand_case (swtch);
3635       }
3636       break;
3637     case GIMPLE_ASM:
3638       expand_asm_stmt (as_a <gasm *> (stmt));
3639       break;
3640     case GIMPLE_CALL:
3641       expand_call_stmt (as_a <gcall *> (stmt));
3642       break;
3643 
3644     case GIMPLE_RETURN:
3645       {
3646 	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3647 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3648 
3649 	if (op0 && op0 != error_mark_node)
3650 	  {
3651 	    tree result = DECL_RESULT (current_function_decl);
3652 
3653 	    /* Mark that we have a return statement with missing bounds.  */
3654 	    if (!bnd
3655 		&& chkp_function_instrumented_p (cfun->decl)
3656 		&& !DECL_P (op0))
3657 	      bnd = error_mark_node;
3658 
3659 	    /* If we are not returning the current function's RESULT_DECL,
3660 	       build an assignment to it.  */
3661 	    if (op0 != result)
3662 	      {
3663 		/* I believe that a function's RESULT_DECL is unique.  */
3664 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3665 
3666 		/* ??? We'd like to use simply expand_assignment here,
3667 		   but this fails if the value is of BLKmode but the return
3668 		   decl is a register.  expand_return has special handling
3669 		   for this combination, which eventually should move
3670 		   to common code.  See comments there.  Until then, let's
3671 		   build a modify expression :-/  */
3672 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3673 			      result, op0);
3674 	      }
3675 	  }
3676 
3677 	if (!op0)
3678 	  expand_null_return ();
3679 	else
3680 	  expand_return (op0, bnd);
3681       }
3682       break;
3683 
3684     case GIMPLE_ASSIGN:
3685       {
3686 	gassign *assign_stmt = as_a <gassign *> (stmt);
3687 	tree lhs = gimple_assign_lhs (assign_stmt);
3688 
3689 	/* Tree expand used to fiddle with |= and &= of two bitfield
3690 	   COMPONENT_REFs here.  This can't happen with gimple: the LHS
3691 	   of binary assigns must be a gimple reg.  */
3692 
3693 	if (TREE_CODE (lhs) != SSA_NAME
3694 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3695 	       == GIMPLE_SINGLE_RHS)
3696 	  {
3697 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3698 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3699 			== GIMPLE_SINGLE_RHS);
3700 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3701 		/* Do not put locations on possibly shared trees.  */
3702 		&& !is_gimple_min_invariant (rhs))
3703 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3704 	    if (TREE_CLOBBER_P (rhs))
3705 	      /* This is a clobber marking that this LHS is going out
3706 		 of scope.  */
3707 	      ;
3708 	    else
3709 	      expand_assignment (lhs, rhs,
3710 				 gimple_assign_nontemporal_move_p (
3711 				   assign_stmt));
3712 	  }
3713 	else
3714 	  {
3715 	    rtx target, temp;
3716 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3717 	    struct separate_ops ops;
3718 	    bool promoted = false;
3719 
3720 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3721 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3722 	      promoted = true;
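	    /* A promoted SUBREG target means the LHS actually lives in a
	       register of a wider (promoted) mode than its declared mode;
	       stores into it must go through convert_move on SUBREG_REG,
	       which the code below arranges.  */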
3723 
3724 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3725 	    ops.type = TREE_TYPE (lhs);
3726 	    switch (get_gimple_rhs_class (ops.code))
3727 	      {
3728 		case GIMPLE_TERNARY_RHS:
3729 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3730 		  /* Fallthru */
3731 		case GIMPLE_BINARY_RHS:
3732 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3733 		  /* Fallthru */
3734 		case GIMPLE_UNARY_RHS:
3735 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3736 		  break;
3737 		default:
3738 		  gcc_unreachable ();
3739 	      }
3740 	    ops.location = gimple_location (stmt);
3741 
3742 	    /* If we want to use a nontemporal store, force the value into
3743 	       a register first.  If we store into a promoted register,
3744 	       don't expand directly to the target.  */
3745 	    temp = nontemporal || promoted ? NULL_RTX : target;
3746 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3747 				       EXPAND_NORMAL);
3748 
3749 	    if (temp == target)
3750 	      ;
3751 	    else if (promoted)
3752 	      {
3753 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3754 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3755 		   sure that we properly convert it.  */
3756 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3757 		  {
3758 		    temp = convert_modes (GET_MODE (target),
3759 					  TYPE_MODE (ops.type),
3760 					  temp, unsignedp);
3761 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3762 					  GET_MODE (target), temp, unsignedp);
3763 		  }
3764 
3765 		convert_move (SUBREG_REG (target), temp, unsignedp);
3766 	      }
3767 	    else if (nontemporal && emit_storent_insn (target, temp))
3768 	      ;
3769 	    else
3770 	      {
3771 		temp = force_operand (temp, target);
3772 		if (temp != target)
3773 		  emit_move_insn (target, temp);
3774 	      }
3775 	  }
3776       }
3777       break;
3778 
3779     default:
3780       gcc_unreachable ();
3781     }
3782 }
3783 
3784 /* Expand one gimple statement STMT and return the last RTL instruction
3785    before any of the newly generated ones.
3786 
3787    In addition to generating the necessary RTL instructions this also
3788    sets REG_EH_REGION notes if necessary and sets the current source
3789    location for diagnostics.  */
3790 
3791 static rtx_insn *
3792 expand_gimple_stmt (gimple *stmt)
3793 {
3794   location_t saved_location = input_location;
3795   rtx_insn *last = get_last_insn ();
3796   int lp_nr;
3797 
3798   gcc_assert (cfun);
3799 
3800   /* We need to save and restore the current source location so that errors
3801      discovered during expansion are emitted with the right location.  But
3802      it would be better if the diagnostic routines used the source location
3803      embedded in the tree nodes rather than globals.  */
3804   if (gimple_has_location (stmt))
3805     input_location = gimple_location (stmt);
3806 
3807   expand_gimple_stmt_1 (stmt);
3808 
3809   /* Free any temporaries used to evaluate this statement.  */
3810   free_temp_slots ();
3811 
3812   input_location = saved_location;
3813 
3814   /* Mark all insns that may trap.  */
3815   lp_nr = lookup_stmt_eh_lp (stmt);
3816   if (lp_nr)
3817     {
3818       rtx_insn *insn;
3819       for (insn = next_real_insn (last); insn;
3820 	   insn = next_real_insn (insn))
3821 	{
3822 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3823 	      /* If we want exceptions for non-call insns, any
3824 		 may_trap_p instruction may throw.  */
3825 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3826 	      && GET_CODE (PATTERN (insn)) != USE
3827 	      && insn_could_throw_p (insn))
3828 	    make_reg_eh_region_note (insn, 0, lp_nr);
3829 	}
3830     }
3831 
3832   return last;
3833 }
3834 
3835 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3836    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3837    generated a tail call (something that might be denied by the ABI
3838    rules governing the call; see calls.c).
3839 
3840    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3841    can still reach the rest of BB.  The case here is __builtin_sqrt,
3842    where the NaN result goes through the external function (with a
3843    tailcall) and the normal result happens via a sqrt instruction.  */
3844 
3845 static basic_block
3846 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3847 {
3848   rtx_insn *last2, *last;
3849   edge e;
3850   edge_iterator ei;
3851   profile_probability probability;
3852 
3853   last2 = last = expand_gimple_stmt (stmt);
3854 
3855   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3856     if (CALL_P (last) && SIBLING_CALL_P (last))
3857       goto found;
3858 
3859   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3860 
3861   *can_fallthru = true;
3862   return NULL;
3863 
3864  found:
3865   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3866      Any instructions emitted here are about to be deleted.  */
3867   do_pending_stack_adjust ();
3868 
3869   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3870   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3871      EH or abnormal edges, we shouldn't have created a tail call in
3872      the first place.  So it seems to me we should just be removing
3873      all edges here, or redirecting the existing fallthru edge to
3874      the exit block.  */
3875 
3876   probability = profile_probability::never ();
3877 
3878   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3879     {
3880       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3881 	{
3882 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3883 	    e->dest->count -= e->count ();
3884 	  probability += e->probability;
3885 	  remove_edge (e);
3886 	}
3887       else
3888 	ei_next (&ei);
3889     }
3890 
3891   /* This is somewhat ugly: the call_expr expander often emits instructions
3892      after the sibcall (to perform the function return).  These confuse the
3893      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3894   last = NEXT_INSN (last);
3895   gcc_assert (BARRIER_P (last));
3896 
3897   *can_fallthru = false;
3898   while (NEXT_INSN (last))
3899     {
3900       /* For instance, the sqrt builtin expander may expand an if, with
3901 	 the sibcall in the `then' arm and a label for the `else' arm.  */
3902       if (LABEL_P (NEXT_INSN (last)))
3903 	{
3904 	  *can_fallthru = true;
3905 	  break;
3906 	}
3907       delete_insn (NEXT_INSN (last));
3908     }
3909 
3910   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3911 		 | EDGE_SIBCALL);
3912   e->probability = probability;
3913   BB_END (bb) = last;
3914   update_bb_for_insn (bb);
3915 
3916   if (NEXT_INSN (last))
3917     {
3918       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3919 
3920       last = BB_END (bb);
3921       if (BARRIER_P (last))
3922 	BB_END (bb) = PREV_INSN (last);
3923     }
3924 
3925   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3926 
3927   return bb;
3928 }
3929 
3930 /* Return the difference between the floor and the truncated result of
3931    a signed division by OP1 with remainder MOD.  */
3932 static rtx
3933 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3934 {
3935   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
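  /* Illustration: for -7 / 2 truncation gives -3 with MOD = -1;
     OP1 / MOD = 2 / -1 < 0, so the adjustment is -1 and the floor
     result is -3 + -1 = -4.  */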
3936   return gen_rtx_IF_THEN_ELSE
3937     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3938      gen_rtx_IF_THEN_ELSE
3939      (mode, gen_rtx_LT (BImode,
3940 			gen_rtx_DIV (mode, op1, mod),
3941 			const0_rtx),
3942       constm1_rtx, const0_rtx),
3943      const0_rtx);
3944 }
3945 
3946 /* Return the difference between the ceil and the truncated result of
3947    a signed division by OP1 with remainder MOD.  */
3948 static rtx
3949 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3950 {
3951   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
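  /* Illustration: for 7 / 2 truncation gives 3 with MOD = 1;
     OP1 / MOD = 2 / 1 > 0, so the adjustment is 1 and the ceiling
     result is 3 + 1 = 4.  */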
3952   return gen_rtx_IF_THEN_ELSE
3953     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3954      gen_rtx_IF_THEN_ELSE
3955      (mode, gen_rtx_GT (BImode,
3956 			gen_rtx_DIV (mode, op1, mod),
3957 			const0_rtx),
3958       const1_rtx, const0_rtx),
3959      const0_rtx);
3960 }
3961 
3962 /* Return the difference between the ceil and the truncated result of
3963    an unsigned division by OP1 with remainder MOD.  */
3964 static rtx
3965 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3966 {
3967   /* (mod != 0 ? 1 : 0) */
3968   return gen_rtx_IF_THEN_ELSE
3969     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3970      const1_rtx, const0_rtx);
3971 }
3972 
3973 /* Return the difference between the rounded and the truncated result
3974    of a signed division by OP1 with remainder MOD.  Halfway cases are
3975    rounded away from zero, rather than to the nearest even number.  */
3976 static rtx
3977 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3978 {
3979   /* (abs (mod) >= abs (op1) - abs (mod)
3980       ? (op1 / mod > 0 ? 1 : -1)
3981       : 0) */
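  /* Illustration: for 7 / 4 truncation gives 1 with MOD = 3;
     abs (3) >= abs (4) - abs (3) and OP1 / MOD > 0, so the adjustment
     is 1 and the rounded result is 1 + 1 = 2.  */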
3982   return gen_rtx_IF_THEN_ELSE
3983     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3984 		       gen_rtx_MINUS (mode,
3985 				      gen_rtx_ABS (mode, op1),
3986 				      gen_rtx_ABS (mode, mod))),
3987      gen_rtx_IF_THEN_ELSE
3988      (mode, gen_rtx_GT (BImode,
3989 			gen_rtx_DIV (mode, op1, mod),
3990 			const0_rtx),
3991       const1_rtx, constm1_rtx),
3992      const0_rtx);
3993 }
3994 
3995 /* Return the difference between the rounded and the truncated result
3996    of an unsigned division by OP1 with remainder MOD.  Halfway cases
3997    are rounded away from zero, rather than to the nearest even
3998    number.  */
3999 static rtx
4000 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4001 {
4002   /* (mod >= op1 - mod ? 1 : 0) */
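  /* Illustration: for 5 / 2 (unsigned) truncation gives 2 with MOD = 1;
     1 >= 2 - 1, so the adjustment is 1 and the rounded result is 3.  */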
4003   return gen_rtx_IF_THEN_ELSE
4004     (mode, gen_rtx_GE (BImode, mod,
4005 		       gen_rtx_MINUS (mode, op1, mod)),
4006      const1_rtx, const0_rtx);
4007 }
4008 
4009 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4010    any rtl.  */
4011 
4012 static rtx
4013 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4014 			      addr_space_t as)
4015 {
4016 #ifndef POINTERS_EXTEND_UNSIGNED
4017   gcc_assert (mode == Pmode
4018 	      || mode == targetm.addr_space.address_mode (as));
4019   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4020 #else
4021   rtx temp;
4022 
4023   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4024 
4025   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4026     return x;
4027 
4028   /* X must have some form of address mode already.  */
4029   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4030   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4031     x = lowpart_subreg (mode, x, xmode);
4032   else if (POINTERS_EXTEND_UNSIGNED > 0)
4033     x = gen_rtx_ZERO_EXTEND (mode, x);
4034   else if (!POINTERS_EXTEND_UNSIGNED)
4035     x = gen_rtx_SIGN_EXTEND (mode, x);
4036   else
4037     {
4038       switch (GET_CODE (x))
4039 	{
4040 	case SUBREG:
4041 	  if ((SUBREG_PROMOTED_VAR_P (x)
4042 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4043 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4044 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4045 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4046 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4047 	      && GET_MODE (SUBREG_REG (x)) == mode)
4048 	    return SUBREG_REG (x);
4049 	  break;
4050 	case LABEL_REF:
4051 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4052 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4053 	  return temp;
4054 	case SYMBOL_REF:
4055 	  temp = shallow_copy_rtx (x);
4056 	  PUT_MODE (temp, mode);
4057 	  return temp;
4058 	case CONST:
4059 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4060 	  if (temp)
4061 	    temp = gen_rtx_CONST (mode, temp);
4062 	  return temp;
4063 	case PLUS:
4064 	case MINUS:
4065 	  if (CONST_INT_P (XEXP (x, 1)))
4066 	    {
4067 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4068 	      if (temp)
4069 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4070 	    }
4071 	  break;
4072 	default:
4073 	  break;
4074 	}
4075       /* Don't know how to express ptr_extend as an operation in debug info.  */
4076       return NULL;
4077     }
4078 #endif /* POINTERS_EXTEND_UNSIGNED */
4079 
4080   return x;
4081 }
4082 
4083 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4084    by avoid_deep_ter_for_debug.  */
4085 
4086 static hash_map<tree, tree> *deep_ter_debug_map;
4087 
4088 /* Split too deep TER chains for debug stmts using debug temporaries.  */
4089 
4090 static void
4091 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4092 {
4093   use_operand_p use_p;
4094   ssa_op_iter iter;
4095   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4096     {
4097       tree use = USE_FROM_PTR (use_p);
4098       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4099 	continue;
4100       gimple *g = get_gimple_for_ssa_name (use);
4101       if (g == NULL)
4102 	continue;
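      /* Once the chain of TERed definitions behind this use gets deeper
	 than a small fixed limit, break it up: bind the use to a fresh
	 DEBUG_EXPR_DECL with a debug bind inserted right after its
	 defining statement, so that later expansion of debug stmts need
	 not recurse through the whole chain.  */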
4103       if (depth > 6 && !stmt_ends_bb_p (g))
4104 	{
4105 	  if (deep_ter_debug_map == NULL)
4106 	    deep_ter_debug_map = new hash_map<tree, tree>;
4107 
4108 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4109 	  if (vexpr != NULL)
4110 	    continue;
4111 	  vexpr = make_node (DEBUG_EXPR_DECL);
4112 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4113 	  DECL_ARTIFICIAL (vexpr) = 1;
4114 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4115 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4116 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4117 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4118 	  avoid_deep_ter_for_debug (def_temp, 0);
4119 	}
4120       else
4121 	avoid_deep_ter_for_debug (g, depth + 1);
4122     }
4123 }
4124 
4125 /* Return an RTX equivalent to the value of the parameter DECL.  */
4126 
4127 static rtx
4128 expand_debug_parm_decl (tree decl)
4129 {
4130   rtx incoming = DECL_INCOMING_RTL (decl);
4131 
4132   if (incoming
4133       && GET_MODE (incoming) != BLKmode
4134       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4135 	  || (MEM_P (incoming)
4136 	      && REG_P (XEXP (incoming, 0))
4137 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4138     {
4139       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
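      /* An ENTRY_VALUE records the location the parameter occupied on
	 entry to the function, so the debug info can refer to the
	 parameter's incoming value even after the register or slot has
	 been reused.  */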
4140 
4141 #ifdef HAVE_window_save
4142       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4143 	 If the target machine has an explicit window save instruction, the
4144 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4145       if (REG_P (incoming)
4146 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4147 	incoming
4148 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4149 				OUTGOING_REGNO (REGNO (incoming)), 0);
4150       else if (MEM_P (incoming))
4151 	{
4152 	  rtx reg = XEXP (incoming, 0);
4153 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4154 	    {
4155 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4156 	      incoming = replace_equiv_address_nv (incoming, reg);
4157 	    }
4158 	  else
4159 	    incoming = copy_rtx (incoming);
4160 	}
4161 #endif
4162 
4163       ENTRY_VALUE_EXP (rtl) = incoming;
4164       return rtl;
4165     }
4166 
4167   if (incoming
4168       && GET_MODE (incoming) != BLKmode
4169       && !TREE_ADDRESSABLE (decl)
4170       && MEM_P (incoming)
4171       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4172 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4173 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4174 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4175     return copy_rtx (incoming);
4176 
4177   return NULL_RTX;
4178 }
4179 
4180 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4181 
4182 static rtx
4183 expand_debug_expr (tree exp)
4184 {
4185   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4186   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4187   machine_mode inner_mode = VOIDmode;
4188   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4189   addr_space_t as;
4190   scalar_int_mode op0_mode, op1_mode, addr_mode;
4191 
4192   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4193     {
4194     case tcc_expression:
4195       switch (TREE_CODE (exp))
4196 	{
4197 	case COND_EXPR:
4198 	case DOT_PROD_EXPR:
4199 	case SAD_EXPR:
4200 	case WIDEN_MULT_PLUS_EXPR:
4201 	case WIDEN_MULT_MINUS_EXPR:
4202 	case FMA_EXPR:
4203 	  goto ternary;
4204 
4205 	case TRUTH_ANDIF_EXPR:
4206 	case TRUTH_ORIF_EXPR:
4207 	case TRUTH_AND_EXPR:
4208 	case TRUTH_OR_EXPR:
4209 	case TRUTH_XOR_EXPR:
4210 	  goto binary;
4211 
4212 	case TRUTH_NOT_EXPR:
4213 	  goto unary;
4214 
4215 	default:
4216 	  break;
4217 	}
4218       break;
4219 
4220     ternary:
4221       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4222       if (!op2)
4223 	return NULL_RTX;
4224       /* Fall through.  */
4225 
4226     binary:
4227     case tcc_binary:
4228       if (mode == BLKmode)
4229 	return NULL_RTX;
4230       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4231       if (!op1)
4232 	return NULL_RTX;
4233       switch (TREE_CODE (exp))
4234 	{
4235 	case LSHIFT_EXPR:
4236 	case RSHIFT_EXPR:
4237 	case LROTATE_EXPR:
4238 	case RROTATE_EXPR:
4239 	case WIDEN_LSHIFT_EXPR:
4240 	  /* Ensure second operand isn't wider than the first one.  */
4241 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4242 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4243 	      && (GET_MODE_UNIT_PRECISION (mode)
4244 		  < GET_MODE_PRECISION (op1_mode)))
4245 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4246 	  break;
4247 	default:
4248 	  break;
4249 	}
4250       /* Fall through.  */
4251 
4252     unary:
4253     case tcc_unary:
4254       if (mode == BLKmode)
4255 	return NULL_RTX;
4256       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4257       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4258       if (!op0)
4259 	return NULL_RTX;
4260       break;
4261 
4262     case tcc_comparison:
4263       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4264       goto binary;
4265 
4266     case tcc_type:
4267     case tcc_statement:
4268       gcc_unreachable ();
4269 
4270     case tcc_constant:
4271     case tcc_exceptional:
4272     case tcc_declaration:
4273     case tcc_reference:
4274     case tcc_vl_exp:
4275       break;
4276     }
4277 
4278   switch (TREE_CODE (exp))
4279     {
4280     case STRING_CST:
4281       if (!lookup_constant_def (exp))
4282 	{
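	  /* A CONST_STRING carries only a pointer to the string text, so
	     only a plain NUL-terminated string whose length matches the
	     tree's recorded length (no embedded NULs) can be represented
	     this way; anything else is rejected below.  */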
4283 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4284 	      != (size_t) TREE_STRING_LENGTH (exp))
4285 	    return NULL_RTX;
4286 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4287 	  op0 = gen_rtx_MEM (BLKmode, op0);
4288 	  set_mem_attributes (op0, exp, 0);
4289 	  return op0;
4290 	}
4291       /* Fall through.  */
4292 
4293     case INTEGER_CST:
4294     case REAL_CST:
4295     case FIXED_CST:
4296       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4297       return op0;
4298 
4299     case POLY_INT_CST:
4300       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4301 
4302     case COMPLEX_CST:
4303       gcc_assert (COMPLEX_MODE_P (mode));
4304       op0 = expand_debug_expr (TREE_REALPART (exp));
4305       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4306       return gen_rtx_CONCAT (mode, op0, op1);
4307 
4308     case DEBUG_EXPR_DECL:
4309       op0 = DECL_RTL_IF_SET (exp);
4310 
4311       if (op0)
4312 	return op0;
4313 
4314       op0 = gen_rtx_DEBUG_EXPR (mode);
4315       DEBUG_EXPR_TREE_DECL (op0) = exp;
4316       SET_DECL_RTL (exp, op0);
4317 
4318       return op0;
4319 
4320     case VAR_DECL:
4321     case PARM_DECL:
4322     case FUNCTION_DECL:
4323     case LABEL_DECL:
4324     case CONST_DECL:
4325     case RESULT_DECL:
4326       op0 = DECL_RTL_IF_SET (exp);
4327 
4328       /* This decl was probably optimized away.  */
4329       if (!op0
4330 	  /* At least label RTXen are sometimes replaced by
4331 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4332 	     handled by copy_rtx.  */
4333 	  || NOTE_P (op0))
4334 	{
4335 	  if (!VAR_P (exp)
4336 	      || DECL_EXTERNAL (exp)
4337 	      || !TREE_STATIC (exp)
4338 	      || !DECL_NAME (exp)
4339 	      || DECL_HARD_REGISTER (exp)
4340 	      || DECL_IN_CONSTANT_POOL (exp)
4341 	      || mode == VOIDmode)
4342 	    return NULL;
4343 
4344 	  op0 = make_decl_rtl_for_debug (exp);
4345 	  if (!MEM_P (op0)
4346 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4347 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4348 	    return NULL;
4349 	}
4350       else
4351 	op0 = copy_rtx (op0);
4352 
4353       if (GET_MODE (op0) == BLKmode
4354 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4355 	     below would ICE.  While it is likely a FE bug,
4356 	     try to be robust here.  See PR43166.  */
4357 	  || mode == BLKmode
4358 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4359 	{
4360 	  gcc_assert (MEM_P (op0));
4361 	  op0 = adjust_address_nv (op0, mode, 0);
4362 	  return op0;
4363 	}
4364 
4365       /* Fall through.  */
4366 
4367     adjust_mode:
4368     case PAREN_EXPR:
4369     CASE_CONVERT:
4370       {
4371 	inner_mode = GET_MODE (op0);
4372 
4373 	if (mode == inner_mode)
4374 	  return op0;
4375 
4376 	if (inner_mode == VOIDmode)
4377 	  {
4378 	    if (TREE_CODE (exp) == SSA_NAME)
4379 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4380 	    else
4381 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4382 	    if (mode == inner_mode)
4383 	      return op0;
4384 	  }
4385 
4386 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4387 	  {
4388 	    if (GET_MODE_UNIT_BITSIZE (mode)
4389 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4390 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4391 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4392 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4393 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4394 	    else
4395 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4396 	  }
4397 	else if (FLOAT_MODE_P (mode))
4398 	  {
4399 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4400 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4401 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4402 	    else
4403 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4404 	  }
4405 	else if (FLOAT_MODE_P (inner_mode))
4406 	  {
4407 	    if (unsignedp)
4408 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4409 	    else
4410 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4411 	  }
4412 	else if (GET_MODE_UNIT_PRECISION (mode)
4413 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4414 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4415 	else if (GET_MODE_UNIT_PRECISION (mode)
4416 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4417 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4418 	else if (UNARY_CLASS_P (exp)
4419 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4420 		 : unsignedp)
4421 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4422 	else
4423 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4424 
4425 	return op0;
4426       }
4427 
4428     case MEM_REF:
4429       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4430 	{
4431 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4432 				     TREE_OPERAND (exp, 0),
4433 				     TREE_OPERAND (exp, 1));
4434 	  if (newexp)
4435 	    return expand_debug_expr (newexp);
4436 	}
4437       /* FALLTHROUGH */
4438     case INDIRECT_REF:
4439       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4440       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4441       if (!op0)
4442 	return NULL;
4443 
4444       if (TREE_CODE (exp) == MEM_REF)
4445 	{
4446 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4447 	      || (GET_CODE (op0) == PLUS
4448 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4449 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4450 	       Instead just use get_inner_reference.  */
4451 	    goto component_ref;
4452 
4453 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4454 	  if (!op1 || !CONST_INT_P (op1))
4455 	    return NULL;
4456 
4457 	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4458 	}
4459 
4460       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4461 
4462       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4463 					  op0, as);
4464       if (op0 == NULL_RTX)
4465 	return NULL;
4466 
4467       op0 = gen_rtx_MEM (mode, op0);
4468       set_mem_attributes (op0, exp, 0);
4469       if (TREE_CODE (exp) == MEM_REF
4470 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4471 	set_mem_expr (op0, NULL_TREE);
4472       set_mem_addr_space (op0, as);
4473 
4474       return op0;
4475 
4476     case TARGET_MEM_REF:
4477       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4478 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4479 	return NULL;
4480 
4481       op0 = expand_debug_expr
4482 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4483       if (!op0)
4484 	return NULL;
4485 
4486       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4487       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4488 					  op0, as);
4489       if (op0 == NULL_RTX)
4490 	return NULL;
4491 
4492       op0 = gen_rtx_MEM (mode, op0);
4493 
4494       set_mem_attributes (op0, exp, 0);
4495       set_mem_addr_space (op0, as);
4496 
4497       return op0;
4498 
4499     component_ref:
4500     case ARRAY_REF:
4501     case ARRAY_RANGE_REF:
4502     case COMPONENT_REF:
4503     case BIT_FIELD_REF:
4504     case REALPART_EXPR:
4505     case IMAGPART_EXPR:
4506     case VIEW_CONVERT_EXPR:
4507       {
4508 	machine_mode mode1;
4509 	poly_int64 bitsize, bitpos;
4510 	tree offset;
4511 	int reversep, volatilep = 0;
4512 	tree tem
4513 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4514 				 &unsignedp, &reversep, &volatilep);
4515 	rtx orig_op0;
4516 
4517 	if (known_eq (bitsize, 0))
4518 	  return NULL;
4519 
4520 	orig_op0 = op0 = expand_debug_expr (tem);
4521 
4522 	if (!op0)
4523 	  return NULL;
4524 
4525 	if (offset)
4526 	  {
4527 	    machine_mode addrmode, offmode;
4528 
4529 	    if (!MEM_P (op0))
4530 	      return NULL;
4531 
4532 	    op0 = XEXP (op0, 0);
4533 	    addrmode = GET_MODE (op0);
4534 	    if (addrmode == VOIDmode)
4535 	      addrmode = Pmode;
4536 
4537 	    op1 = expand_debug_expr (offset);
4538 	    if (!op1)
4539 	      return NULL;
4540 
4541 	    offmode = GET_MODE (op1);
4542 	    if (offmode == VOIDmode)
4543 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4544 
4545 	    if (addrmode != offmode)
4546 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4547 
4548 	    /* Don't use offset_address here, we don't need a
4549 	       recognizable address, and we don't want to generate
4550 	       code.  */
4551 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4552 							  op0, op1));
4553 	  }
4554 
4555 	if (MEM_P (op0))
4556 	  {
4557 	    if (mode1 == VOIDmode)
4558 	      {
4559 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4560 		  return NULL;
4561 		/* Bitfield.  */
4562 		mode1 = smallest_int_mode_for_size (bitsize);
4563 	      }
4564 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4565 	    if (maybe_ne (bytepos, 0))
4566 	      {
4567 		op0 = adjust_address_nv (op0, mode1, bytepos);
4568 		bitpos = num_trailing_bits (bitpos);
4569 	      }
4570 	    else if (known_eq (bitpos, 0)
4571 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4572 	      op0 = adjust_address_nv (op0, mode, 0);
4573 	    else if (GET_MODE (op0) != mode1)
4574 	      op0 = adjust_address_nv (op0, mode1, 0);
4575 	    else
4576 	      op0 = copy_rtx (op0);
4577 	    if (op0 == orig_op0)
4578 	      op0 = shallow_copy_rtx (op0);
4579 	    set_mem_attributes (op0, exp, 0);
4580 	  }
4581 
4582 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4583 	  return op0;
4584 
4585 	if (maybe_lt (bitpos, 0))
4586           return NULL;
4587 
4588 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4589 	  return NULL;
4590 
4591 	poly_int64 bytepos;
4592 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4593 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4594 	  {
4595 	    machine_mode opmode = GET_MODE (op0);
4596 
4597 	    if (opmode == VOIDmode)
4598 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4599 
4600 	    /* This condition may hold if we're expanding the address
4601 	       right past the end of an array that turned out not to
4602 	       be addressable (i.e., the address was only computed in
4603 	       debug stmts).  The gen_subreg below would rightfully
4604 	       crash, and the address doesn't really exist, so just
4605 	       drop it.  */
4606 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4607 	      return NULL;
4608 
4609 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4610 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4611 	  }
4612 
4613 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4614 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4615 				     ? SIGN_EXTRACT
4616 				     : ZERO_EXTRACT, mode,
4617 				     GET_MODE (op0) != VOIDmode
4618 				     ? GET_MODE (op0)
4619 				     : TYPE_MODE (TREE_TYPE (tem)),
4620 				     op0, gen_int_mode (bitsize, word_mode),
4621 				     gen_int_mode (bitpos, word_mode));
4622       }
4623 
4624     case ABS_EXPR:
4625       return simplify_gen_unary (ABS, mode, op0, mode);
4626 
4627     case NEGATE_EXPR:
4628       return simplify_gen_unary (NEG, mode, op0, mode);
4629 
4630     case BIT_NOT_EXPR:
4631       return simplify_gen_unary (NOT, mode, op0, mode);
4632 
4633     case FLOAT_EXPR:
4634       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4635 									 0)))
4636 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4637 				 inner_mode);
4638 
4639     case FIX_TRUNC_EXPR:
4640       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4641 				 inner_mode);
4642 
4643     case POINTER_PLUS_EXPR:
4644       /* For the rare target where pointers are not the same size as
4645 	 size_t, we need to check for mis-matched modes and correct
4646 	 the addend.  */
4647       if (op0 && op1
4648 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4649 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4650 	  && op0_mode != op1_mode)
4651 	{
4652 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4653 	      /* If OP0 is a partial mode, then we must truncate, even
4654 		 if it has the same bitsize as OP1, since GCC's
4655 		 representation of partial modes is opaque.  */
4656 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4657 		  && (GET_MODE_BITSIZE (op0_mode)
4658 		      == GET_MODE_BITSIZE (op1_mode))))
4659 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4660 	  else
4661 	    /* We always sign-extend, regardless of the signedness of
4662 	       the operand, because the operand is always unsigned
4663 	       here even if the original C expression is signed.  */
4664 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4665 	}
4666       /* Fall through.  */
4667     case PLUS_EXPR:
4668       return simplify_gen_binary (PLUS, mode, op0, op1);
4669 
4670     case MINUS_EXPR:
4671     case POINTER_DIFF_EXPR:
4672       return simplify_gen_binary (MINUS, mode, op0, op1);
4673 
4674     case MULT_EXPR:
4675       return simplify_gen_binary (MULT, mode, op0, op1);
4676 
4677     case RDIV_EXPR:
4678     case TRUNC_DIV_EXPR:
4679     case EXACT_DIV_EXPR:
4680       if (unsignedp)
4681 	return simplify_gen_binary (UDIV, mode, op0, op1);
4682       else
4683 	return simplify_gen_binary (DIV, mode, op0, op1);
4684 
4685     case TRUNC_MOD_EXPR:
4686       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4687 
4688     case FLOOR_DIV_EXPR:
4689       if (unsignedp)
4690 	return simplify_gen_binary (UDIV, mode, op0, op1);
4691       else
4692 	{
4693 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4694 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4695 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4696 	  return simplify_gen_binary (PLUS, mode, div, adj);
4697 	}
4698 
4699     case FLOOR_MOD_EXPR:
4700       if (unsignedp)
4701 	return simplify_gen_binary (UMOD, mode, op0, op1);
4702       else
4703 	{
4704 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4705 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4706 	  adj = simplify_gen_unary (NEG, mode,
4707 				    simplify_gen_binary (MULT, mode, adj, op1),
4708 				    mode);
4709 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4710 	}
4711 
4712     case CEIL_DIV_EXPR:
4713       if (unsignedp)
4714 	{
4715 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4716 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4717 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4718 	  return simplify_gen_binary (PLUS, mode, div, adj);
4719 	}
4720       else
4721 	{
4722 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4723 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4724 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4725 	  return simplify_gen_binary (PLUS, mode, div, adj);
4726 	}
4727 
4728     case CEIL_MOD_EXPR:
4729       if (unsignedp)
4730 	{
4731 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4732 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4733 	  adj = simplify_gen_unary (NEG, mode,
4734 				    simplify_gen_binary (MULT, mode, adj, op1),
4735 				    mode);
4736 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4737 	}
4738       else
4739 	{
4740 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4741 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4742 	  adj = simplify_gen_unary (NEG, mode,
4743 				    simplify_gen_binary (MULT, mode, adj, op1),
4744 				    mode);
4745 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4746 	}
4747 
4748     case ROUND_DIV_EXPR:
4749       if (unsignedp)
4750 	{
4751 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4752 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4753 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4754 	  return simplify_gen_binary (PLUS, mode, div, adj);
4755 	}
4756       else
4757 	{
4758 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4759 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4760 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4761 	  return simplify_gen_binary (PLUS, mode, div, adj);
4762 	}
4763 
4764     case ROUND_MOD_EXPR:
4765       if (unsignedp)
4766 	{
4767 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4768 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4769 	  adj = simplify_gen_unary (NEG, mode,
4770 				    simplify_gen_binary (MULT, mode, adj, op1),
4771 				    mode);
4772 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4773 	}
4774       else
4775 	{
4776 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4777 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4778 	  adj = simplify_gen_unary (NEG, mode,
4779 				    simplify_gen_binary (MULT, mode, adj, op1),
4780 				    mode);
4781 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4782 	}
4783 
4784     case LSHIFT_EXPR:
4785       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4786 
4787     case RSHIFT_EXPR:
4788       if (unsignedp)
4789 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4790       else
4791 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4792 
4793     case LROTATE_EXPR:
4794       return simplify_gen_binary (ROTATE, mode, op0, op1);
4795 
4796     case RROTATE_EXPR:
4797       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4798 
4799     case MIN_EXPR:
4800       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4801 
4802     case MAX_EXPR:
4803       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4804 
4805     case BIT_AND_EXPR:
4806     case TRUTH_AND_EXPR:
4807       return simplify_gen_binary (AND, mode, op0, op1);
4808 
4809     case BIT_IOR_EXPR:
4810     case TRUTH_OR_EXPR:
4811       return simplify_gen_binary (IOR, mode, op0, op1);
4812 
4813     case BIT_XOR_EXPR:
4814     case TRUTH_XOR_EXPR:
4815       return simplify_gen_binary (XOR, mode, op0, op1);
4816 
4817     case TRUTH_ANDIF_EXPR:
4818       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4819 
4820     case TRUTH_ORIF_EXPR:
4821       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4822 
4823     case TRUTH_NOT_EXPR:
4824       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4825 
4826     case LT_EXPR:
4827       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4828 				      op0, op1);
4829 
4830     case LE_EXPR:
4831       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4832 				      op0, op1);
4833 
4834     case GT_EXPR:
4835       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4836 				      op0, op1);
4837 
4838     case GE_EXPR:
4839       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4840 				      op0, op1);
4841 
4842     case EQ_EXPR:
4843       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4844 
4845     case NE_EXPR:
4846       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4847 
4848     case UNORDERED_EXPR:
4849       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4850 
4851     case ORDERED_EXPR:
4852       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4853 
4854     case UNLT_EXPR:
4855       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4856 
4857     case UNLE_EXPR:
4858       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4859 
4860     case UNGT_EXPR:
4861       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4862 
4863     case UNGE_EXPR:
4864       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4865 
4866     case UNEQ_EXPR:
4867       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4868 
4869     case LTGT_EXPR:
4870       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4871 
4872     case COND_EXPR:
4873       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4874 
4875     case COMPLEX_EXPR:
4876       gcc_assert (COMPLEX_MODE_P (mode));
4877       if (GET_MODE (op0) == VOIDmode)
4878 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4879       if (GET_MODE (op1) == VOIDmode)
4880 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4881       return gen_rtx_CONCAT (mode, op0, op1);
4882 
4883     case CONJ_EXPR:
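      /* Complex conjugate: keep the real part and negate the imaginary
	 part, either piecewise on a CONCAT or after extracting the two
	 halves of the value.  */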
4884       if (GET_CODE (op0) == CONCAT)
4885 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4886 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4887 						   XEXP (op0, 1),
4888 						   GET_MODE_INNER (mode)));
4889       else
4890 	{
4891 	  scalar_mode imode = GET_MODE_INNER (mode);
4892 	  rtx re, im;
4893 
4894 	  if (MEM_P (op0))
4895 	    {
4896 	      re = adjust_address_nv (op0, imode, 0);
4897 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4898 	    }
4899 	  else
4900 	    {
4901 	      scalar_int_mode ifmode;
4902 	      scalar_int_mode ihmode;
4903 	      rtx halfsize;
4904 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4905 		  || !int_mode_for_mode (imode).exists (&ihmode))
4906 		return NULL;
4907 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4908 	      re = op0;
4909 	      if (mode != ifmode)
4910 		re = gen_rtx_SUBREG (ifmode, re, 0);
4911 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4912 	      if (imode != ihmode)
4913 		re = gen_rtx_SUBREG (imode, re, 0);
4914 	      im = copy_rtx (op0);
4915 	      if (mode != ifmode)
4916 		im = gen_rtx_SUBREG (ifmode, im, 0);
4917 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4918 	      if (imode != ihmode)
4919 		im = gen_rtx_SUBREG (imode, im, 0);
4920 	    }
4921 	  im = gen_rtx_NEG (imode, im);
4922 	  return gen_rtx_CONCAT (mode, re, im);
4923 	}
4924 
4925     case ADDR_EXPR:
4926       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
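      /* If the operand did not expand to a MEM, its address cannot be
	 taken directly; fall back to DEBUG_IMPLICIT_PTRs for decls (and
	 accesses based on them) whose location the debug back end may
	 still be able to recover later.  */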
4927       if (!op0 || !MEM_P (op0))
4928 	{
4929 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4930 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4931 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4932 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4933 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4934 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4935 
4936 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4937 	    {
4938 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4939 	      bool reverse;
4940 	      tree decl
4941 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4942 					   &bitsize, &maxsize, &reverse);
4943 	      if ((VAR_P (decl)
4944 		   || TREE_CODE (decl) == PARM_DECL
4945 		   || TREE_CODE (decl) == RESULT_DECL)
4946 		  && (!TREE_ADDRESSABLE (decl)
4947 		      || target_for_debug_bind (decl))
4948 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4949 		  && known_gt (bitsize, 0)
4950 		  && known_eq (bitsize, maxsize))
4951 		{
4952 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4953 		  return plus_constant (mode, base, byteoffset);
4954 		}
4955 	    }
4956 
4957 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4958 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4959 		 == ADDR_EXPR)
4960 	    {
4961 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4962 						     0));
4963 	      if (op0 != NULL
4964 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4965 		      || (GET_CODE (op0) == PLUS
4966 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4967 			  && CONST_INT_P (XEXP (op0, 1)))))
4968 		{
4969 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4970 							 1));
4971 		  if (!op1 || !CONST_INT_P (op1))
4972 		    return NULL;
4973 
4974 		  return plus_constant (mode, op0, INTVAL (op1));
4975 		}
4976 	    }
4977 
4978 	  return NULL;
4979 	}
4980 
4981       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4982       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
4983       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
4984 
4985       return op0;
4986 
4987     case VECTOR_CST:
4988       {
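	/* Represent the constant vector as a CONCATN of its expanded
	   elements.  */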
4989 	unsigned HOST_WIDE_INT i, nelts;
4990 
4991 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4992 	  return NULL;
4993 
4994 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4995 
4996 	for (i = 0; i < nelts; ++i)
4997 	  {
4998 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4999 	    if (!op1)
5000 	      return NULL;
5001 	    XVECEXP (op0, 0, i) = op1;
5002 	  }
5003 
5004 	return op0;
5005       }
5006 
5007     case CONSTRUCTOR:
5008       if (TREE_CLOBBER_P (exp))
5009 	return NULL;
5010       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5011 	{
5012 	  unsigned i;
5013 	  unsigned HOST_WIDE_INT nelts;
5014 	  tree val;
5015 
5016 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5017 	    goto flag_unsupported;
5018 
5019 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5020 
5021 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5022 	    {
5023 	      op1 = expand_debug_expr (val);
5024 	      if (!op1)
5025 		return NULL;
5026 	      XVECEXP (op0, 0, i) = op1;
5027 	    }
5028 
5029 	  if (i < nelts)
5030 	    {
5031 	      op1 = expand_debug_expr
5032 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5033 
5034 	      if (!op1)
5035 		return NULL;
5036 
5037 	      for (; i < nelts; i++)
5038 		XVECEXP (op0, 0, i) = op1;
5039 	    }
5040 
5041 	  return op0;
5042 	}
5043       else
5044 	goto flag_unsupported;
5045 
5046     case CALL_EXPR:
5047       /* ??? Maybe handle some builtins?  */
5048       return NULL;
5049 
5050     case SSA_NAME:
5051       {
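	/* Prefer expanding the RHS of the (TERed) statement that defines
	   this SSA name; otherwise fall back to the pseudo assigned to
	   the name's partition, or to the incoming PARM_DECL location
	   for an unused default definition.  */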
5052 	gimple *g = get_gimple_for_ssa_name (exp);
5053 	if (g)
5054 	  {
5055 	    tree t = NULL_TREE;
5056 	    if (deep_ter_debug_map)
5057 	      {
5058 		tree *slot = deep_ter_debug_map->get (exp);
5059 		if (slot)
5060 		  t = *slot;
5061 	      }
5062 	    if (t == NULL_TREE)
5063 	      t = gimple_assign_rhs_to_tree (g);
5064 	    op0 = expand_debug_expr (t);
5065 	    if (!op0)
5066 	      return NULL;
5067 	  }
5068 	else
5069 	  {
5070 	    /* If this is a reference to an incoming value of a
5071 	       parameter that is never used in the code, or whose
5072 	       incoming value is never used, use the PARM_DECL's
5073 	       DECL_RTL if set.  */
5074 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5075 		&& SSA_NAME_VAR (exp)
5076 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5077 		&& has_zero_uses (exp))
5078 	      {
5079 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5080 		if (op0)
5081 		  goto adjust_mode;
5082 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5083 		if (op0)
5084 		  goto adjust_mode;
5085 	      }
5086 
5087 	    int part = var_to_partition (SA.map, exp);
5088 
5089 	    if (part == NO_PARTITION)
5090 	      return NULL;
5091 
5092 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5093 
5094 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5095 	  }
5096 	goto adjust_mode;
5097       }
5098 
5099     case ERROR_MARK:
5100       return NULL;
5101 
5102     /* Vector codes.  For most of these we don't have RTL codes.  */
5103     case REALIGN_LOAD_EXPR:
5104     case VEC_COND_EXPR:
5105     case VEC_PACK_FIX_TRUNC_EXPR:
5106     case VEC_PACK_SAT_EXPR:
5107     case VEC_PACK_TRUNC_EXPR:
5108     case VEC_UNPACK_FLOAT_HI_EXPR:
5109     case VEC_UNPACK_FLOAT_LO_EXPR:
5110     case VEC_UNPACK_HI_EXPR:
5111     case VEC_UNPACK_LO_EXPR:
5112     case VEC_WIDEN_MULT_HI_EXPR:
5113     case VEC_WIDEN_MULT_LO_EXPR:
5114     case VEC_WIDEN_MULT_EVEN_EXPR:
5115     case VEC_WIDEN_MULT_ODD_EXPR:
5116     case VEC_WIDEN_LSHIFT_HI_EXPR:
5117     case VEC_WIDEN_LSHIFT_LO_EXPR:
5118     case VEC_PERM_EXPR:
5119     case VEC_DUPLICATE_EXPR:
5120     case VEC_SERIES_EXPR:
5121       return NULL;
5122 
5123     /* Misc codes.  */
5124     case ADDR_SPACE_CONVERT_EXPR:
5125     case FIXED_CONVERT_EXPR:
5126     case OBJ_TYPE_REF:
5127     case WITH_SIZE_EXPR:
5128     case BIT_INSERT_EXPR:
5129       return NULL;
5130 
5131     case DOT_PROD_EXPR:
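      /* Only handled when everything is scalar: widen both multiplicands
	 to MODE, multiply, and add the accumulator operand.  */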
5132       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5133 	  && SCALAR_INT_MODE_P (mode))
5134 	{
5135 	  op0
5136 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5137 									  0)))
5138 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5139 				  inner_mode);
5140 	  op1
5141 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5142 									  1)))
5143 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5144 				  inner_mode);
5145 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5146 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5147 	}
5148       return NULL;
5149 
5150     case WIDEN_MULT_EXPR:
5151     case WIDEN_MULT_PLUS_EXPR:
5152     case WIDEN_MULT_MINUS_EXPR:
5153       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5154 	  && SCALAR_INT_MODE_P (mode))
5155 	{
5156 	  inner_mode = GET_MODE (op0);
5157 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5158 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5159 	  else
5160 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5161 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5162 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5163 	  else
5164 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5165 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5166 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5167 	    return op0;
5168 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5169 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5170 	  else
5171 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5172 	}
5173       return NULL;
5174 
5175     case MULT_HIGHPART_EXPR:
5176       /* ??? Similar to the above.  */
5177       return NULL;
5178 
5179     case WIDEN_SUM_EXPR:
5180     case WIDEN_LSHIFT_EXPR:
5181       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5182 	  && SCALAR_INT_MODE_P (mode))
5183 	{
5184 	  op0
5185 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5186 									  0)))
5187 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5188 				  inner_mode);
5189 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5190 				      ? ASHIFT : PLUS, mode, op0, op1);
5191 	}
5192       return NULL;
5193 
5194     case FMA_EXPR:
5195       return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5196 
5197     default:
5198     flag_unsupported:
5199       if (flag_checking)
5200 	{
5201 	  debug_tree (exp);
5202 	  gcc_unreachable ();
5203 	}
5204       return NULL;
5205     }
5206 }
5207 
5208 /* Return an RTX equivalent to the source bind value of the tree expression
5209    EXP.  */
5210 
5211 static rtx
5212 expand_debug_source_expr (tree exp)
5213 {
5214   rtx op0 = NULL_RTX;
5215   machine_mode mode = VOIDmode, inner_mode;
5216 
5217   switch (TREE_CODE (exp))
5218     {
5219     case PARM_DECL:
5220       {
5221 	mode = DECL_MODE (exp);
5222 	op0 = expand_debug_parm_decl (exp);
5223 	if (op0)
5224 	   break;
5225 	/* See if this isn't an argument that has been completely
5226 	   optimized out.  */
5227 	if (!DECL_RTL_SET_P (exp)
5228 	    && !DECL_INCOMING_RTL (exp)
5229 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5230 	  {
5231 	    tree aexp = DECL_ORIGIN (exp);
5232 	    if (DECL_CONTEXT (aexp)
5233 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5234 	      {
5235 		vec<tree, va_gc> **debug_args;
5236 		unsigned int ix;
5237 		tree ddecl;
5238 		debug_args = decl_debug_args_lookup (current_function_decl);
5239 		if (debug_args != NULL)
5240 		  {
5241 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5242 			 ix += 2)
5243 		      if (ddecl == aexp)
5244 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5245 		  }
5246 	      }
5247 	  }
5248 	break;
5249       }
5250     default:
5251       break;
5252     }
5253 
5254   if (op0 == NULL_RTX)
5255     return NULL_RTX;
5256 
5257   inner_mode = GET_MODE (op0);
5258   if (mode == inner_mode)
5259     return op0;
5260 
5261   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5262     {
5263       if (GET_MODE_UNIT_BITSIZE (mode)
5264 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5265 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5266       else if (GET_MODE_UNIT_BITSIZE (mode)
5267 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5268 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5269       else
5270 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5271     }
5272   else if (FLOAT_MODE_P (mode))
5273     gcc_unreachable ();
5274   else if (FLOAT_MODE_P (inner_mode))
5275     {
5276       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5277 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5278       else
5279 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5280     }
5281   else if (GET_MODE_UNIT_PRECISION (mode)
5282 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5283     op0 = lowpart_subreg (mode, op0, inner_mode);
5284   else if (GET_MODE_UNIT_PRECISION (mode)
5285 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5286     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5287   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5288     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5289   else
5290     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5291 
5292   return op0;
5293 }
5294 
5295 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5296    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5297    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
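/* For example, if a bound location is a chain of five nested arithmetic
   operations, the subexpression at depth four is split out: a DEBUG_INSN
   binding a new DEBUG_EXPR decl to that subexpression is emitted before
   INSN, and the original location refers to the decl instead.  */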
5298 
5299 static void
5300 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5301 {
5302   rtx exp = *exp_p;
5303 
5304   if (exp == NULL_RTX)
5305     return;
5306 
5307   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5308     return;
5309 
5310   if (depth == 4)
5311     {
5312       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5313       rtx dval = make_debug_expr_from_rtl (exp);
5314 
5315       /* Emit a debug bind insn before INSN.  */
5316       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5317 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5318 				       VAR_INIT_STATUS_INITIALIZED);
5319 
5320       emit_debug_insn_before (bind, insn);
5321       *exp_p = dval;
5322       return;
5323     }
5324 
5325   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5326   int i, j;
5327   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5328     switch (*format_ptr++)
5329       {
5330       case 'e':
5331 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5332 	break;
5333 
5334       case 'E':
5335       case 'V':
5336 	for (j = 0; j < XVECLEN (exp, i); j++)
5337 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5338 	break;
5339 
5340       default:
5341 	break;
5342       }
5343 }
5344 
5345 /* Expand the _LOCs in debug insns.  We run this after expanding all
5346    regular insns, so that any variables referenced in the function
5347    will have their DECL_RTLs set.  */
5348 
5349 static void
5350 expand_debug_locations (void)
5351 {
5352   rtx_insn *insn;
5353   rtx_insn *last = get_last_insn ();
5354   int save_strict_alias = flag_strict_aliasing;
5355 
5356   /* New alias sets while setting up memory attributes cause
5357      -fcompare-debug failures, even though they don't bring about any
5358      codegen changes.  */
5359   flag_strict_aliasing = 0;
5360 
5361   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5362     if (DEBUG_BIND_INSN_P (insn))
5363       {
5364 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5365 	rtx val;
5366 	rtx_insn *prev_insn, *insn2;
5367 	machine_mode mode;
5368 
5369 	if (value == NULL_TREE)
5370 	  val = NULL_RTX;
5371 	else
5372 	  {
5373 	    if (INSN_VAR_LOCATION_STATUS (insn)
5374 		== VAR_INIT_STATUS_UNINITIALIZED)
5375 	      val = expand_debug_source_expr (value);
5376 	    /* The avoid_deep_ter_for_debug function inserts
5377 	       debug bind stmts after SSA_NAME definition, with the
5378 	       SSA_NAME as the whole bind location.  Temporarily disable
5379 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5380 	       being defined in this DEBUG_INSN.  */
5381 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5382 	      {
5383 		tree *slot = deep_ter_debug_map->get (value);
5384 		if (slot)
5385 		  {
5386 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5387 		      *slot = NULL_TREE;
5388 		    else
5389 		      slot = NULL;
5390 		  }
5391 		val = expand_debug_expr (value);
5392 		if (slot)
5393 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5394 	      }
5395 	    else
5396 	      val = expand_debug_expr (value);
5397 	    gcc_assert (last == get_last_insn ());
5398 	  }
5399 
5400 	if (!val)
5401 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5402 	else
5403 	  {
5404 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5405 
5406 	    gcc_assert (mode == GET_MODE (val)
5407 			|| (GET_MODE (val) == VOIDmode
5408 			    && (CONST_SCALAR_INT_P (val)
5409 				|| GET_CODE (val) == CONST_FIXED
5410 				|| GET_CODE (val) == LABEL_REF)));
5411 	  }
5412 
5413 	INSN_VAR_LOCATION_LOC (insn) = val;
5414 	prev_insn = PREV_INSN (insn);
5415 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5416 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5417       }
5418 
5419   flag_strict_aliasing = save_strict_alias;
5420 }
5421 
5422 /* Swap the operands of commutative operations so that the more
5423    expensive operand is expanded first.  */
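/* For instance, given
     _3 = _1 + _2;
   where _2 is defined by a chain of TERed statements whose accumulated
   cost exceeds that of the single statement defining _1, the operands
   are swapped so that the more expensive _2 is expanded first.  */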
5424 
5425 static void
5426 reorder_operands (basic_block bb)
5427 {
5428   unsigned int *lattice;  /* Hold cost of each statement.  */
5429   unsigned int i = 0, n = 0;
5430   gimple_stmt_iterator gsi;
5431   gimple_seq stmts;
5432   gimple *stmt;
5433   bool swap;
5434   tree op0, op1;
5435   ssa_op_iter iter;
5436   use_operand_p use_p;
5437   gimple *def0, *def1;
5438 
5439   /* Compute cost of each statement using estimate_num_insns.  */
5440   stmts = bb_seq (bb);
5441   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5442     {
5443       stmt = gsi_stmt (gsi);
5444       if (!is_gimple_debug (stmt))
5445         gimple_set_uid (stmt, n++);
5446     }
5447   lattice = XNEWVEC (unsigned int, n);
5448   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5449     {
5450       unsigned cost;
5451       stmt = gsi_stmt (gsi);
5452       if (is_gimple_debug (stmt))
5453 	continue;
5454       cost = estimate_num_insns (stmt, &eni_size_weights);
5455       lattice[i] = cost;
5456       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5457 	{
5458 	  tree use = USE_FROM_PTR (use_p);
5459 	  gimple *def_stmt;
5460 	  if (TREE_CODE (use) != SSA_NAME)
5461 	    continue;
5462 	  def_stmt = get_gimple_for_ssa_name (use);
5463 	  if (!def_stmt)
5464 	    continue;
5465 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5466 	}
5467       i++;
5468       if (!is_gimple_assign (stmt)
5469 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5470 	continue;
5471       op0 = gimple_op (stmt, 1);
5472       op1 = gimple_op (stmt, 2);
5473       if (TREE_CODE (op0) != SSA_NAME
5474 	  || TREE_CODE (op1) != SSA_NAME)
5475 	continue;
5476       /* Swap operands if the second one is more expensive.  */
5477       def0 = get_gimple_for_ssa_name (op0);
5478       def1 = get_gimple_for_ssa_name (op1);
5479       if (!def1)
5480 	continue;
5481       swap = false;
5482       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5483 	swap = true;
5484       if (swap)
5485 	{
5486 	  if (dump_file && (dump_flags & TDF_DETAILS))
5487 	    {
5488 	      fprintf (dump_file, "Swap operands in stmt:\n");
5489 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5490 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5491 		       def0 ? lattice[gimple_uid (def0)] : 0,
5492 		       lattice[gimple_uid (def1)]);
5493 	    }
5494 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5495 			     gimple_assign_rhs2_ptr (stmt));
5496 	}
5497     }
5498   XDELETE (lattice);
5499 }
5500 
5501 /* Expand basic block BB from GIMPLE trees to RTL.  */
5502 
5503 static basic_block
5504 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5505 {
5506   gimple_stmt_iterator gsi;
5507   gimple_seq stmts;
5508   gimple *stmt = NULL;
5509   rtx_note *note = NULL;
5510   rtx_insn *last;
5511   edge e;
5512   edge_iterator ei;
5513 
5514   if (dump_file)
5515     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5516 	     bb->index);
5517 
5518   /* Note that since we are now transitioning from GIMPLE to RTL, we
5519      cannot use the gsi_*_bb() routines because they expect the basic
5520      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5521      access the BB sequence directly.  */
5522   if (optimize)
5523     reorder_operands (bb);
5524   stmts = bb_seq (bb);
5525   bb->il.gimple.seq = NULL;
5526   bb->il.gimple.phi_nodes = NULL;
5527   rtl_profile_for_bb (bb);
5528   init_rtl_bb_info (bb);
5529   bb->flags |= BB_RTL;
5530 
5531   /* Remove the RETURN_EXPR if we may fall through to the exit
5532      instead.  */
5533   gsi = gsi_last (stmts);
5534   if (!gsi_end_p (gsi)
5535       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5536     {
5537       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5538 
5539       gcc_assert (single_succ_p (bb));
5540       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5541 
5542       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5543 	  && !gimple_return_retval (ret_stmt))
5544 	{
5545 	  gsi_remove (&gsi, false);
5546 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5547 	}
5548     }
5549 
5550   gsi = gsi_start (stmts);
5551   if (!gsi_end_p (gsi))
5552     {
5553       stmt = gsi_stmt (gsi);
5554       if (gimple_code (stmt) != GIMPLE_LABEL)
5555 	stmt = NULL;
5556     }
5557 
5558   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5559 
5560   if (stmt || elt)
5561     {
5562       gcc_checking_assert (!note);
5563       last = get_last_insn ();
5564 
5565       if (stmt)
5566 	{
5567 	  expand_gimple_stmt (stmt);
5568 	  gsi_next (&gsi);
5569 	}
5570 
5571       if (elt)
5572 	emit_label (*elt);
5573 
5574       BB_HEAD (bb) = NEXT_INSN (last);
5575       if (NOTE_P (BB_HEAD (bb)))
5576 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5577       gcc_assert (LABEL_P (BB_HEAD (bb)));
5578       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5579 
5580       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5581     }
5582   else
5583     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5584 
5585   if (note)
5586     NOTE_BASIC_BLOCK (note) = bb;
5587 
5588   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5589     {
5590       basic_block new_bb;
5591 
5592       stmt = gsi_stmt (gsi);
5593 
5594       /* If this statement is a non-debug one, and we generate debug
5595 	 insns, then this one might be the last real use of a TERed
5596 	 SSA_NAME, but where there are still some debug uses further
5597 	 down.  Expanding the current SSA name in such further debug
5598 	 uses by their RHS might lead to wrong debug info, as coalescing
5599 	 might make the operands of such RHS be placed into the same
5600 	 pseudo as something else.  Like so:
5601 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5602 	   use(a_1);
5603 	   a_2 = ...
5604            #DEBUG ... => a_1
5605 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5606 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5607 	 the write to a_2 would actually have clobbered the place which
5608 	 formerly held a_0.
5609 
5610 	 So, instead of that, we recognize the situation, and generate
5611 	 debug temporaries at the last real use of TERed SSA names:
5612 	   a_1 = a_0 + 1;
5613            #DEBUG #D1 => a_1
5614 	   use(a_1);
5615 	   a_2 = ...
5616            #DEBUG ... => #D1
5617 	 */
5618       if (MAY_HAVE_DEBUG_BIND_INSNS
5619 	  && SA.values
5620 	  && !is_gimple_debug (stmt))
5621 	{
5622 	  ssa_op_iter iter;
5623 	  tree op;
5624 	  gimple *def;
5625 
5626 	  location_t sloc = curr_insn_location ();
5627 
5628 	  /* Look for SSA names that have their last use here (TERed
5629 	     names always have only one real use).  */
5630 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5631 	    if ((def = get_gimple_for_ssa_name (op)))
5632 	      {
5633 		imm_use_iterator imm_iter;
5634 		use_operand_p use_p;
5635 		bool have_debug_uses = false;
5636 
5637 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5638 		  {
5639 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5640 		      {
5641 			have_debug_uses = true;
5642 			break;
5643 		      }
5644 		  }
5645 
5646 		if (have_debug_uses)
5647 		  {
5648 		    /* OP is a TERed SSA name, with DEF its defining
5649 		       statement, and where OP is used in further debug
5650 		       instructions.  Generate a debug temporary, and
5651 		       replace all uses of OP in debug insns with that
5652 		       temporary.  */
5653 		    gimple *debugstmt;
5654 		    tree value = gimple_assign_rhs_to_tree (def);
5655 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5656 		    rtx val;
5657 		    machine_mode mode;
5658 
5659 		    set_curr_insn_location (gimple_location (def));
5660 
5661 		    DECL_ARTIFICIAL (vexpr) = 1;
5662 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5663 		    if (DECL_P (value))
5664 		      mode = DECL_MODE (value);
5665 		    else
5666 		      mode = TYPE_MODE (TREE_TYPE (value));
5667 		    SET_DECL_MODE (vexpr, mode);
5668 
5669 		    val = gen_rtx_VAR_LOCATION
5670 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5671 
5672 		    emit_debug_insn (val);
5673 
5674 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5675 		      {
5676 			if (!gimple_debug_bind_p (debugstmt))
5677 			  continue;
5678 
5679 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5680 			  SET_USE (use_p, vexpr);
5681 
5682 			update_stmt (debugstmt);
5683 		      }
5684 		  }
5685 	      }
5686 	  set_curr_insn_location (sloc);
5687 	}
5688 
5689       currently_expanding_gimple_stmt = stmt;
5690 
5691       /* Expand this statement, then evaluate the resulting RTL and
5692 	 fixup the CFG accordingly.  */
5693       if (gimple_code (stmt) == GIMPLE_COND)
5694 	{
5695 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5696 	  if (new_bb)
5697 	    return new_bb;
5698 	}
5699       else if (is_gimple_debug (stmt))
5700 	{
5701 	  location_t sloc = curr_insn_location ();
5702 	  gimple_stmt_iterator nsi = gsi;
5703 
5704 	  for (;;)
5705 	    {
5706 	      tree var;
5707 	      tree value = NULL_TREE;
5708 	      rtx val = NULL_RTX;
5709 	      machine_mode mode;
5710 
5711 	      if (!gimple_debug_nonbind_marker_p (stmt))
5712 		{
5713 		  if (gimple_debug_bind_p (stmt))
5714 		    {
5715 		      var = gimple_debug_bind_get_var (stmt);
5716 
5717 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5718 			  && TREE_CODE (var) != LABEL_DECL
5719 			  && !target_for_debug_bind (var))
5720 			goto delink_debug_stmt;
5721 
5722 		      if (DECL_P (var) && !VECTOR_TYPE_P (TREE_TYPE (var)))
5723 			mode = DECL_MODE (var);
5724 		      else
5725 			mode = TYPE_MODE (TREE_TYPE (var));
5726 
5727 		      if (gimple_debug_bind_has_value_p (stmt))
5728 			value = gimple_debug_bind_get_value (stmt);
5729 
5730 		      val = gen_rtx_VAR_LOCATION
5731 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5732 		    }
5733 		  else if (gimple_debug_source_bind_p (stmt))
5734 		    {
5735 		      var = gimple_debug_source_bind_get_var (stmt);
5736 
5737 		      value = gimple_debug_source_bind_get_value (stmt);
5738 
5739 		      if (!VECTOR_TYPE_P (TREE_TYPE (var)))
5740 			mode = DECL_MODE (var);
5741 		      else
5742 			mode = TYPE_MODE (TREE_TYPE (var));
5743 
5744 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5745 						  VAR_INIT_STATUS_UNINITIALIZED);
5746 		    }
5747 		  else
5748 		    gcc_unreachable ();
5749 		}
5750 	      /* If this function was first compiled with markers
5751 		 enabled, but they're now disabled (e.g. LTO), drop
5752 		 them on the floor.  */
5753 	      else if (gimple_debug_nonbind_marker_p (stmt)
5754 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5755 		goto delink_debug_stmt;
5756 	      else if (gimple_debug_begin_stmt_p (stmt))
5757 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5758 	      else if (gimple_debug_inline_entry_p (stmt))
5759 		{
5760 		  tree block = gimple_block (stmt);
5761 
5762 		  if (block)
5763 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5764 		  else
5765 		    goto delink_debug_stmt;
5766 		}
5767 	      else
5768 		gcc_unreachable ();
5769 
5770 	      last = get_last_insn ();
5771 
5772 	      set_curr_insn_location (gimple_location (stmt));
5773 
5774 	      emit_debug_insn (val);
5775 
5776 	      if (dump_file && (dump_flags & TDF_DETAILS))
5777 		{
5778 		  /* We can't dump the insn with a TREE where an RTX
5779 		     is expected.  */
5780 		  if (GET_CODE (val) == VAR_LOCATION)
5781 		    {
5782 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5783 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5784 		    }
5785 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5786 		  if (GET_CODE (val) == VAR_LOCATION)
5787 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5788 		}
5789 
5790 	    delink_debug_stmt:
5791 	      /* In order not to generate too many debug temporaries,
5792 	         we delink all uses of debug statements we already expanded.
5793 		 Therefore debug statements between definition and real
5794 		 use of TERed SSA names will continue to use the SSA name,
5795 		 and not be replaced with debug temps.  */
5796 	      delink_stmt_imm_use (stmt);
5797 
5798 	      gsi = nsi;
5799 	      gsi_next (&nsi);
5800 	      if (gsi_end_p (nsi))
5801 		break;
5802 	      stmt = gsi_stmt (nsi);
5803 	      if (!is_gimple_debug (stmt))
5804 		break;
5805 	    }
5806 
5807 	  set_curr_insn_location (sloc);
5808 	}
5809       else
5810 	{
5811 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5812 	  if (call_stmt
5813 	      && gimple_call_tail_p (call_stmt)
5814 	      && disable_tail_calls)
5815 	    gimple_call_set_tail (call_stmt, false);
5816 
5817 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5818 	    {
5819 	      bool can_fallthru;
5820 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5821 	      if (new_bb)
5822 		{
5823 		  if (can_fallthru)
5824 		    bb = new_bb;
5825 		  else
5826 		    return new_bb;
5827 		}
5828 	    }
5829 	  else
5830 	    {
5831 	      def_operand_p def_p;
5832 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5833 
5834 	      if (def_p != NULL)
5835 		{
5836 		  /* Ignore this stmt if it is in the list of
5837 		     replaceable expressions.  */
5838 		  if (SA.values
5839 		      && bitmap_bit_p (SA.values,
5840 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5841 		    continue;
5842 		}
5843 	      last = expand_gimple_stmt (stmt);
5844 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5845 	    }
5846 	}
5847     }
5848 
5849   currently_expanding_gimple_stmt = NULL;
5850 
5851   /* Expand implicit goto and convert goto_locus.  */
5852   FOR_EACH_EDGE (e, ei, bb->succs)
5853     {
5854       if (e->goto_locus != UNKNOWN_LOCATION)
5855 	set_curr_insn_location (e->goto_locus);
5856       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5857 	{
5858 	  emit_jump (label_rtx_for_bb (e->dest));
5859 	  e->flags &= ~EDGE_FALLTHRU;
5860 	}
5861     }
5862 
5863   /* Expanded RTL can create a jump in the last instruction of the block.
5864      This might later be assumed to be a jump to the successor and break
5865      edge insertion.  We need to insert a dummy move to prevent this.  PR41440. */
5866   if (single_succ_p (bb)
5867       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5868       && (last = get_last_insn ())
5869       && (JUMP_P (last)
5870 	  || (DEBUG_INSN_P (last)
5871 	      && JUMP_P (prev_nondebug_insn (last)))))
5872     {
5873       rtx dummy = gen_reg_rtx (SImode);
5874       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5875     }
5876 
5877   do_pending_stack_adjust ();
5878 
5879   /* Find the block tail.  The last insn in the block is the insn
5880      before a barrier and/or table jump insn.  */
5881   last = get_last_insn ();
5882   if (BARRIER_P (last))
5883     last = PREV_INSN (last);
5884   if (JUMP_TABLE_DATA_P (last))
5885     last = PREV_INSN (PREV_INSN (last));
5886   BB_END (bb) = last;
5887 
5888   update_bb_for_insn (bb);
5889 
5890   return bb;
5891 }
5892 
5893 
5894 /* Create a basic block for initialization code.  */
5895 
5896 static basic_block
5897 construct_init_block (void)
5898 {
5899   basic_block init_block, first_block;
5900   edge e = NULL;
5901   int flags;
5902 
5903   /* Multiple entry points not supported yet.  */
5904   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5905   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5906   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5907   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5908   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5909 
5910   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5911 
5912   /* When the entry edge points to the first basic block, we don't need
5913      a jump; otherwise we have to jump to the proper target.  */
5914   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5915     {
5916       tree label = gimple_block_label (e->dest);
5917 
5918       emit_jump (jump_target_rtx (label));
5919       flags = 0;
5920     }
5921   else
5922     flags = EDGE_FALLTHRU;
5923 
5924   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5925 				   get_last_insn (),
5926 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5927   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5928   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5929   if (e)
5930     {
5931       first_block = e->dest;
5932       redirect_edge_succ (e, init_block);
5933       e = make_single_succ_edge (init_block, first_block, flags);
5934     }
5935   else
5936     e = make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5937 			       EDGE_FALLTHRU);
5938 
5939   update_bb_for_insn (init_block);
5940   return init_block;
5941 }
5942 
5943 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5944    found in the block tree.  */
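/* For example, DECL_INITIAL of the function gets depth 0 and each level
   of BLOCK_SUBBLOCKS below it adds one.  */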
5945 
5946 static void
5947 set_block_levels (tree block, int level)
5948 {
5949   while (block)
5950     {
5951       BLOCK_NUMBER (block) = level;
5952       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5953       block = BLOCK_CHAIN (block);
5954     }
5955 }
5956 
5957 /* Create a block containing landing pads and similar stuff.  */
5958 
5959 static void
5960 construct_exit_block (void)
5961 {
5962   rtx_insn *head = get_last_insn ();
5963   rtx_insn *end;
5964   basic_block exit_block;
5965   edge e, e2;
5966   unsigned ix;
5967   edge_iterator ei;
5968   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5969   rtx_insn *orig_end = BB_END (prev_bb);
5970 
5971   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5972 
5973   /* Make sure the locus is set to the end of the function, so that
5974      epilogue line numbers and warnings are set properly.  */
5975   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5976     input_location = cfun->function_end_locus;
5977 
5978   /* Generate rtl for function exit.  */
5979   expand_function_end ();
5980 
5981   end = get_last_insn ();
5982   if (head == end)
5983     return;
5984   /* While emitting the function end we could have moved the end of the
5985      last basic block.  */
5986   BB_END (prev_bb) = orig_end;
5987   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5988     head = NEXT_INSN (head);
5989   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5990      bb counting will be confused.  Any instructions before that
5991      label are emitted for the case where PREV_BB falls through into the
5992      exit block, so append those instructions to prev_bb in that case.  */
5993   if (NEXT_INSN (head) != return_label)
5994     {
5995       while (NEXT_INSN (head) != return_label)
5996 	{
5997 	  if (!NOTE_P (NEXT_INSN (head)))
5998 	    BB_END (prev_bb) = NEXT_INSN (head);
5999 	  head = NEXT_INSN (head);
6000 	}
6001     }
6002   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6003   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6004   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6005 
6006   ix = 0;
6007   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6008     {
6009       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6010       if (!(e->flags & EDGE_ABNORMAL))
6011 	redirect_edge_succ (e, exit_block);
6012       else
6013 	ix++;
6014     }
6015 
6016   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6017 			     EDGE_FALLTHRU);
6018   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6019     if (e2 != e)
6020       {
6021 	exit_block->count -= e2->count ();
6022       }
6023   update_bb_for_insn (exit_block);
6024 }
6025 
6026 /* Helper function for discover_nonconstant_array_refs.
6027    Look for ARRAY_REF nodes with non-constant indexes and mark their
6028    base variables addressable.  */
6029 
6030 static tree
6031 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6032 				   void *data ATTRIBUTE_UNUSED)
6033 {
6034   tree t = *tp;
6035 
6036   if (IS_TYPE_OR_DECL_P (t))
6037     *walk_subtrees = 0;
6038   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6039     {
6040       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6041 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6042 	      && (!TREE_OPERAND (t, 2)
6043 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6044 	     || (TREE_CODE (t) == COMPONENT_REF
6045 		 && (!TREE_OPERAND (t,2)
6046 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6047 	     || TREE_CODE (t) == BIT_FIELD_REF
6048 	     || TREE_CODE (t) == REALPART_EXPR
6049 	     || TREE_CODE (t) == IMAGPART_EXPR
6050 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6051 	     || CONVERT_EXPR_P (t))
6052 	t = TREE_OPERAND (t, 0);
6053 
6054       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6055 	{
6056 	  t = get_base_address (t);
6057 	  if (t && DECL_P (t)
6058               && DECL_MODE (t) != BLKmode)
6059 	    TREE_ADDRESSABLE (t) = 1;
6060 	}
6061 
6062       *walk_subtrees = 0;
6063     }
6064 
6065   return NULL_TREE;
6066 }
6067 
6068 /* RTL expansion is not able to compile array references with variable
6069    offsets for arrays stored in a single register.  Discover such
6070    expressions and mark the variables as addressable to avoid this
6071    scenario.  */
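/* For example, given
     int a[2];
     ... = a[i];
   where A could otherwise live entirely in registers, A is marked
   TREE_ADDRESSABLE so that it gets a stack slot and the variable index I
   can be expanded as an address computation.  */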
6072 
6073 static void
6074 discover_nonconstant_array_refs (void)
6075 {
6076   basic_block bb;
6077   gimple_stmt_iterator gsi;
6078 
6079   FOR_EACH_BB_FN (bb, cfun)
6080     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6081       {
6082 	gimple *stmt = gsi_stmt (gsi);
6083 	if (!is_gimple_debug (stmt))
6084 	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6085       }
6086 }
6087 
6088 /* This function sets crtl->args.internal_arg_pointer to a virtual
6089    register if DRAP is needed.  The local register allocator will replace
6090    virtual_incoming_args_rtx with the virtual register.  */
6091 
6092 static void
6093 expand_stack_alignment (void)
6094 {
6095   rtx drap_rtx;
6096   unsigned int preferred_stack_boundary;
6097 
6098   if (! SUPPORTS_STACK_ALIGNMENT)
6099     return;
6100 
6101   if (cfun->calls_alloca
6102       || cfun->has_nonlocal_label
6103       || crtl->has_nonlocal_goto)
6104     crtl->need_drap = true;
6105 
6106   /* Call update_stack_boundary here again to update incoming stack
6107      boundary.  It may set incoming stack alignment to a different
6108      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6109      use the minimum incoming stack alignment to check if it is OK
6110      to perform sibcall optimization since sibcall optimization will
6111      only align the outgoing stack to incoming stack boundary.  */
6112   if (targetm.calls.update_stack_boundary)
6113     targetm.calls.update_stack_boundary ();
6114 
6115   /* The incoming stack frame has to be aligned at least at
6116      parm_stack_boundary.  */
6117   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6118 
6119   /* Update crtl->stack_alignment_estimated and use it later to align
6120      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6121      exceptions since callgraph doesn't collect incoming stack alignment
6122      in this case.  */
6123   if (cfun->can_throw_non_call_exceptions
6124       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6125     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6126   else
6127     preferred_stack_boundary = crtl->preferred_stack_boundary;
6128   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6129     crtl->stack_alignment_estimated = preferred_stack_boundary;
6130   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6131     crtl->stack_alignment_needed = preferred_stack_boundary;
6132 
6133   gcc_assert (crtl->stack_alignment_needed
6134 	      <= crtl->stack_alignment_estimated);
6135 
6136   crtl->stack_realign_needed
6137     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6138   crtl->stack_realign_tried = crtl->stack_realign_needed;
6139 
6140   crtl->stack_realign_processed = true;
6141 
6142   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6143      alignment.  */
6144   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6145   drap_rtx = targetm.calls.get_drap_rtx ();
6146 
6147   /* stack_realign_drap and drap_rtx must match.  */
6148   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6149 
6150   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6151   if (drap_rtx != NULL)
6152     {
6153       crtl->args.internal_arg_pointer = drap_rtx;
6154 
6155       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6156          needed. */
6157       fixup_tail_calls ();
6158     }
6159 }
6160 
6161 
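/* Expand a call to __main at the start of `main', on targets where
   global constructors are run via __main rather than an init section.  */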
6162 static void
6163 expand_main_function (void)
6164 {
6165 #if (defined(INVOKE__main)				\
6166      || (!defined(HAS_INIT_SECTION)			\
6167 	 && !defined(INIT_SECTION_ASM_OP)		\
6168 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6169   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6170 #endif
6171 }
6172 
6173 
6174 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6175    the beginning of a function to be protected.  */
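/* Conceptually this emits the canary store of the prologue, e.g.
     <stack guard slot> = __stack_chk_guard;
   preferably via the target's stack_protect_set pattern so that the
   guard value is not left live in a general register.  */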
6176 
6177 static void
6178 stack_protect_prologue (void)
6179 {
6180   tree guard_decl = targetm.stack_protect_guard ();
6181   rtx x, y;
6182 
6183   x = expand_normal (crtl->stack_protect_guard);
6184   if (guard_decl)
6185     y = expand_normal (guard_decl);
6186   else
6187     y = const0_rtx;
6188 
6189   /* Allow the target to copy from Y to X without leaking Y into a
6190      register.  */
6191   if (targetm.have_stack_protect_set ())
6192     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6193       {
6194 	emit_insn (insn);
6195 	return;
6196       }
6197 
6198   /* Otherwise do a straight move.  */
6199   emit_move_insn (x, y);
6200 }
6201 
6202 /* Translate the intermediate representation contained in the CFG
6203    from GIMPLE trees to RTL.
6204 
6205    We do conversion per basic block and preserve/update the tree CFG.
6206    This implies we have to do some magic as the CFG can simultaneously
6207    consist of basic blocks containing RTL and GIMPLE trees.  This can
6208    confuse the CFG hooks, so be careful to not manipulate CFG during
6209    the expansion.  */
6210 
6211 namespace {
6212 
6213 const pass_data pass_data_expand =
6214 {
6215   RTL_PASS, /* type */
6216   "expand", /* name */
6217   OPTGROUP_NONE, /* optinfo_flags */
6218   TV_EXPAND, /* tv_id */
6219   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6220     | PROP_gimple_lcx
6221     | PROP_gimple_lvec
6222     | PROP_gimple_lva), /* properties_required */
6223   PROP_rtl, /* properties_provided */
6224   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6225   0, /* todo_flags_start */
6226   0, /* todo_flags_finish */
6227 };
6228 
6229 class pass_expand : public rtl_opt_pass
6230 {
6231 public:
6232   pass_expand (gcc::context *ctxt)
6233     : rtl_opt_pass (pass_data_expand, ctxt)
6234   {}
6235 
6236   /* opt_pass methods: */
6237   virtual unsigned int execute (function *);
6238 
6239 }; // class pass_expand
6240 
6241 unsigned int
6242 pass_expand::execute (function *fun)
6243 {
6244   basic_block bb, init_block;
6245   edge_iterator ei;
6246   edge e;
6247   rtx_insn *var_seq, *var_ret_seq;
6248   unsigned i;
6249 
6250   timevar_push (TV_OUT_OF_SSA);
6251   rewrite_out_of_ssa (&SA);
6252   timevar_pop (TV_OUT_OF_SSA);
6253   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6254 
6255   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6256     {
6257       gimple_stmt_iterator gsi;
6258       FOR_EACH_BB_FN (bb, cfun)
6259 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6260 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6261 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6262     }
6263 
6264   /* Make sure all values used by the optimization passes have sane
6265      defaults.  */
6266   reg_renumber = 0;
6267 
6268   /* Some backends want to know that we are expanding to RTL.  */
6269   currently_expanding_to_rtl = 1;
6270   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6271   free_dominance_info (CDI_DOMINATORS);
6272 
6273   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6274 
6275   if (chkp_function_instrumented_p (current_function_decl))
6276     chkp_reset_rtl_bounds ();
6277 
6278   insn_locations_init ();
6279   if (!DECL_IS_BUILTIN (current_function_decl))
6280     {
6281       /* Eventually, all FEs should explicitly set function_start_locus.  */
6282       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6283 	set_curr_insn_location
6284 	  (DECL_SOURCE_LOCATION (current_function_decl));
6285       else
6286 	set_curr_insn_location (fun->function_start_locus);
6287     }
6288   else
6289     set_curr_insn_location (UNKNOWN_LOCATION);
6290   prologue_location = curr_insn_location ();
6291 
6292 #ifdef INSN_SCHEDULING
6293   init_sched_attrs ();
6294 #endif
6295 
6296   /* Make sure the first insn is a note, even if we don't want line
6297      numbers.  This makes sure the first insn will never be deleted.
6298      Also, final expects a note to appear there.  */
6299   emit_note (NOTE_INSN_DELETED);
6300 
6301   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6302   discover_nonconstant_array_refs ();
6303 
6304   targetm.expand_to_rtl_hook ();
6305   crtl->init_stack_alignment ();
6306   fun->cfg->max_jumptable_ents = 0;
6307 
6308   /* Resolve the function section.  Some targets, like ARM EABI, rely on
6309      knowledge of the function section at expansion time to predict the distance of calls.  */
6310   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6311 
6312   /* Expand the variables recorded during gimple lowering.  */
6313   timevar_push (TV_VAR_EXPAND);
6314   start_sequence ();
6315 
6316   var_ret_seq = expand_used_vars ();
6317 
6318   var_seq = get_insns ();
6319   end_sequence ();
6320   timevar_pop (TV_VAR_EXPAND);
6321 
6322   /* Honor stack protection warnings.  */
6323   if (warn_stack_protect)
6324     {
6325       if (fun->calls_alloca)
6326 	warning (OPT_Wstack_protector,
6327 		 "stack protector not protecting local variables: "
6328 		 "variable length buffer");
6329       if (has_short_buffer && !crtl->stack_protect_guard)
6330 	warning (OPT_Wstack_protector,
6331 		 "stack protector not protecting function: "
6332 		 "all local arrays are less than %d bytes long",
6333 		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6334     }
6335 
6336   /* Set up parameters and prepare for return, for the function.  */
6337   expand_function_start (current_function_decl);
6338 
6339   /* If we emitted any instructions for setting up the variables,
6340      emit them before the FUNCTION_START note.  */
6341   if (var_seq)
6342     {
6343       emit_insn_before (var_seq, parm_birth_insn);
6344 
6345       /* In expand_function_end we'll insert the alloca save/restore
6346 	 before parm_birth_insn.  We've just inserted an alloca call.
6347 	 Adjust the pointer to match.  */
6348       parm_birth_insn = var_seq;
6349     }
6350 
6351   /* Now propagate the RTL assignment of each partition to the
6352      underlying var of each SSA_NAME.  */
6353   tree name;
6354 
6355   FOR_EACH_SSA_NAME (i, name, cfun)
6356     {
6357       /* We might have generated new SSA names in
6358 	 update_alias_info_with_stack_vars.  They will have a NULL
6359 	 defining statement, and won't be part of the partitioning,
6360 	 so ignore those.  */
6361       if (!SSA_NAME_DEF_STMT (name))
6362 	continue;
6363 
6364       adjust_one_expanded_partition_var (name);
6365     }
6366 
6367   /* Clean up the RTL of variables that straddle multiple
6368      partitions, and check that the rtl of any PARM_DECLs that are not
6369      cleaned up is that of their default defs.  */
6370   FOR_EACH_SSA_NAME (i, name, cfun)
6371     {
6372       int part;
6373 
6374       /* We might have generated new SSA names in
6375 	 update_alias_info_with_stack_vars.  They will have a NULL
6376 	 defining statement, and won't be part of the partitioning,
6377 	 so ignore those.  */
6378       if (!SSA_NAME_DEF_STMT (name))
6379 	continue;
6380       part = var_to_partition (SA.map, name);
6381       if (part == NO_PARTITION)
6382 	continue;
6383 
6384       /* If this decl was marked as living in multiple places, reset
6385 	 this now to NULL.  */
6386       tree var = SSA_NAME_VAR (name);
6387       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6388 	SET_DECL_RTL (var, NULL);
6389       /* Check that the pseudos chosen by assign_parms are those of
6390 	 the corresponding default defs.  */
6391       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6392 	       && (TREE_CODE (var) == PARM_DECL
6393 		   || TREE_CODE (var) == RESULT_DECL))
6394 	{
6395 	  rtx in = DECL_RTL_IF_SET (var);
6396 	  gcc_assert (in);
6397 	  rtx out = SA.partition_to_pseudo[part];
6398 	  gcc_assert (in == out);
6399 
6400 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6401 	     those expected by debug backends for each parm and for
6402 	     the result.  This is particularly important for stabs,
6403 	     whose register elimination from parm's DECL_RTL may cause
6404 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6405 	     attrs.  So, make sure the RTL already has the parm as the
6406 	     EXPR, so that it won't change.  */
6407 	  SET_DECL_RTL (var, NULL_RTX);
6408 	  if (MEM_P (in))
6409 	    set_mem_attributes (in, var, true);
6410 	  SET_DECL_RTL (var, in);
6411 	}
6412     }
6413 
6414   /* If this function is `main', emit a call to `__main'
6415      to run global initializers, etc.  */
6416   if (DECL_NAME (current_function_decl)
6417       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6418       && DECL_FILE_SCOPE_P (current_function_decl))
6419     expand_main_function ();
6420 
6421   /* Initialize the stack_protect_guard field.  This must happen after the
6422      call to __main (if any) so that the external decl is initialized.  */
6423   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6424     stack_protect_prologue ();
6425 
6426   expand_phi_nodes (&SA);
6427 
6428   /* Release any stale SSA redirection data.  */
6429   redirect_edge_var_map_empty ();
6430 
6431   /* Register rtl specific functions for cfg.  */
6432   rtl_register_cfg_hooks ();
6433 
6434   init_block = construct_init_block ();
6435 
6436   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6437      remaining edges later.  */
6438   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6439     e->flags &= ~EDGE_EXECUTABLE;
6440 
6441   /* If the function has too many markers, drop them while expanding.  */
6442   if (cfun->debug_marker_count
6443       >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6444     cfun->debug_nonbind_markers = false;
6445 
6446   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6447   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6448 		  next_bb)
6449     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6450 
6451   if (MAY_HAVE_DEBUG_BIND_INSNS)
6452     expand_debug_locations ();
6453 
6454   if (deep_ter_debug_map)
6455     {
6456       delete deep_ter_debug_map;
6457       deep_ter_debug_map = NULL;
6458     }
6459 
6460   /* Free stuff we no longer need after GIMPLE optimizations.  */
6461   free_dominance_info (CDI_DOMINATORS);
6462   free_dominance_info (CDI_POST_DOMINATORS);
6463   delete_tree_cfg_annotations (fun);
6464 
6465   timevar_push (TV_OUT_OF_SSA);
6466   finish_out_of_ssa (&SA);
6467   timevar_pop (TV_OUT_OF_SSA);
6468 
6469   timevar_push (TV_POST_EXPAND);
6470   /* We are no longer in SSA form.  */
6471   fun->gimple_df->in_ssa_p = false;
6472   loops_state_clear (LOOP_CLOSED_SSA);
6473 
6474   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6475      conservatively to true until they are all profile-aware.  */
6476   delete lab_rtx_for_bb;
6477   free_histograms (fun);
6478 
6479   construct_exit_block ();
6480   insn_locations_finalize ();
6481 
6482   if (var_ret_seq)
6483     {
6484       rtx_insn *after = return_label;
6485       rtx_insn *next = NEXT_INSN (after);
6486       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6487 	after = next;
6488       emit_insn_after (var_ret_seq, after);
6489     }
6490 
6491   /* Zap the tree EH table.  */
6492   set_eh_throw_stmt_table (fun, NULL);
6493 
6494   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6495      to split edges, which edge insertion might do.  */
6496   rebuild_jump_labels (get_insns ());
6497 
6498   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6499 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6500     {
6501       edge e;
6502       edge_iterator ei;
6503       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6504 	{
6505 	  if (e->insns.r)
6506 	    {
6507 	      rebuild_jump_labels_chain (e->insns.r);
6508 	      /* Put insns after parm birth, but before
6509 		 NOTE_INSN_FUNCTION_BEG.  */
6510 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6511 		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6512 		{
6513 		  rtx_insn *insns = e->insns.r;
6514 		  e->insns.r = NULL;
6515 		  if (NOTE_P (parm_birth_insn)
6516 		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6517 		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6518 		  else
6519 		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6520 		}
6521 	      else
6522 		commit_one_edge_insertion (e);
6523 	    }
6524 	  else
6525 	    ei_next (&ei);
6526 	}
6527     }
6528 
6529   /* We're done expanding trees to RTL.  */
6530   currently_expanding_to_rtl = 0;
6531 
  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

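  /* A single GIMPLE block may have been expanded into RTL that contains
     internal jumps and labels; find_many_sub_basic_blocks splits such
     blocks and rediscovers the abnormal edges removed above, and
     purge_all_dead_edges then drops edges the RTL no longer supports.  */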
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
    {
      tree parent;
      for (parent = DECL_CONTEXT (current_function_decl);
	   parent != NULL_TREE;
	   parent = get_containing_scope (parent))
	if (TREE_CODE (parent) == FUNCTION_DECL)
	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
    }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}