1 /* Liveness for SSA trees.
2    Copyright (C) 2003-2018 Free Software Foundation, Inc.
3    Contributed by Andrew MacLeod <amacleod@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "timevar.h"
29 #include "ssa.h"
30 #include "cgraph.h"
31 #include "gimple-pretty-print.h"
32 #include "diagnostic-core.h"
33 #include "gimple-iterator.h"
34 #include "tree-dfa.h"
35 #include "dumpfile.h"
36 #include "tree-ssa-live.h"
37 #include "debug.h"
38 #include "tree-ssa.h"
39 #include "ipa-utils.h"
40 #include "cfgloop.h"
41 #include "stringpool.h"
42 #include "attribs.h"
43 
44 static void verify_live_on_entry (tree_live_info_p);
45 
46 
47 /* VARMAP maintains a mapping from SSA version number to real variables.
48 
49    All SSA_NAMES are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
51    ssa_names which occur in a copy or in a PHI node into the same partition.
52 
53    At the end of out-of-ssa, each partition becomes a "real" variable and is
54    rewritten as a compiler variable.
55 
56    The var_map data structure is used to manage these partitions.  It allows
57    partitions to be combined, and determines which partition belongs to what
58    ssa_name or variable, and vice versa.  */
59 
60 
61 /* Remove the base table in MAP.  */
62 
63 static void
var_map_base_fini(var_map map)64 var_map_base_fini (var_map map)
65 {
66   /* Free the basevar info if it is present.  */
67   if (map->partition_to_base_index != NULL)
68     {
69       free (map->partition_to_base_index);
70       map->partition_to_base_index = NULL;
71       map->num_basevars = 0;
72     }
73 }
74 /* Create a variable partition map of SIZE, initialize and return it.  */
75 
76 var_map
init_var_map(int size)77 init_var_map (int size)
78 {
79   var_map map;
80 
81   map = (var_map) xmalloc (sizeof (struct _var_map));
82   map->var_partition = partition_new (size);
83 
84   map->partition_to_view = NULL;
85   map->view_to_partition = NULL;
86   map->num_partitions = size;
87   map->partition_size = size;
88   map->num_basevars = 0;
89   map->partition_to_base_index = NULL;
90   return map;
91 }
92 
93 
94 /* Free memory associated with MAP.  */
95 
96 void
delete_var_map(var_map map)97 delete_var_map (var_map map)
98 {
99   var_map_base_fini (map);
100   partition_delete (map->var_partition);
101   free (map->partition_to_view);
102   free (map->view_to_partition);
103   free (map);
104 }
105 
106 
107 /* This function will combine the partitions in MAP for VAR1 and VAR2.  It
108    Returns the partition which represents the new partition.  If the two
109    partitions cannot be combined, NO_PARTITION is returned.  */
110 
111 int
var_union(var_map map,tree var1,tree var2)112 var_union (var_map map, tree var1, tree var2)
113 {
114   int p1, p2, p3;
115 
116   gcc_assert (TREE_CODE (var1) == SSA_NAME);
117   gcc_assert (TREE_CODE (var2) == SSA_NAME);
118 
119   /* This is independent of partition_to_view. If partition_to_view is
120      on, then whichever one of these partitions is absorbed will never have a
121      dereference into the partition_to_view array any more.  */
122 
123   p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
124   p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
125 
126   gcc_assert (p1 != NO_PARTITION);
127   gcc_assert (p2 != NO_PARTITION);
128 
129   if (p1 == p2)
130     p3 = p1;
131   else
132     p3 = partition_union (map->var_partition, p1, p2);
133 
134   if (map->partition_to_view)
135     p3 = map->partition_to_view[p3];
136 
137   return p3;
138 }
139 
140 
141 /* Compress the partition numbers in MAP such that they fall in the range
142    0..(num_partitions-1) instead of wherever they turned out during
143    the partitioning exercise.  This removes any references to unused
144    partitions, thereby allowing bitmaps and other vectors to be much
145    denser.
146 
147    This is implemented such that compaction doesn't affect partitioning.
148    Ie., once partitions are created and possibly merged, running one
149    or more different kind of compaction will not affect the partitions
150    themselves.  Their index might change, but all the same variables will
151    still be members of the same partition group.  This allows work on reduced
152    sets, and no loss of information when a larger set is later desired.
153 
154    In particular, coalescing can work on partitions which have 2 or more
155    definitions, and then 'recompact' later to include all the single
156    definitions for assignment to program variables.  */
157 
158 
159 /* Set MAP back to the initial state of having no partition view.  Return a
160    bitmap which has a bit set for each partition number which is in use in the
161    varmap.  */
162 
163 static bitmap
partition_view_init(var_map map)164 partition_view_init (var_map map)
165 {
166   bitmap used;
167   int tmp;
168   unsigned int x;
169 
170   used = BITMAP_ALLOC (NULL);
171 
172   /* Already in a view? Abandon the old one.  */
173   if (map->partition_to_view)
174     {
175       free (map->partition_to_view);
176       map->partition_to_view = NULL;
177     }
178   if (map->view_to_partition)
179     {
180       free (map->view_to_partition);
181       map->view_to_partition = NULL;
182     }
183 
184   /* Find out which partitions are actually referenced.  */
185   for (x = 0; x < map->partition_size; x++)
186     {
187       tmp = partition_find (map->var_partition, x);
188       if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
189 	  && (!has_zero_uses (ssa_name (tmp))
190 	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
191 	      || (SSA_NAME_VAR (ssa_name (tmp))
192 		  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
193 	bitmap_set_bit (used, tmp);
194     }
195 
196   map->num_partitions = map->partition_size;
197   return used;
198 }
199 
200 
201 /* This routine will finalize the view data for MAP based on the partitions
202    set in SELECTED.  This is either the same bitmap returned from
203    partition_view_init, or a trimmed down version if some of those partitions
204    were not desired in this view.  SELECTED is freed before returning.  */
205 
206 static void
partition_view_fini(var_map map,bitmap selected)207 partition_view_fini (var_map map, bitmap selected)
208 {
209   bitmap_iterator bi;
210   unsigned count, i, x, limit;
211 
212   gcc_assert (selected);
213 
214   count = bitmap_count_bits (selected);
215   limit = map->partition_size;
216 
217   /* If its a one-to-one ratio, we don't need any view compaction.  */
218   if (count < limit)
219     {
220       map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
221       memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
222       map->view_to_partition = (int *)xmalloc (count * sizeof (int));
223 
224       i = 0;
225       /* Give each selected partition an index.  */
226       EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
227 	{
228 	  map->partition_to_view[x] = i;
229 	  map->view_to_partition[i] = x;
230 	  i++;
231 	}
232       gcc_assert (i == count);
233       map->num_partitions = i;
234     }
235 
236   BITMAP_FREE (selected);
237 }
238 
239 
240 /* Create a partition view which includes all the used partitions in MAP.  */
241 
242 void
partition_view_normal(var_map map)243 partition_view_normal (var_map map)
244 {
245   bitmap used;
246 
247   used = partition_view_init (map);
248   partition_view_fini (map, used);
249 
250   var_map_base_fini (map);
251 }
252 
253 
254 /* Create a partition view in MAP which includes just partitions which occur in
255    the bitmap ONLY. If WANT_BASES is true, create the base variable map
256    as well.  */
257 
258 void
partition_view_bitmap(var_map map,bitmap only)259 partition_view_bitmap (var_map map, bitmap only)
260 {
261   bitmap used;
262   bitmap new_partitions = BITMAP_ALLOC (NULL);
263   unsigned x, p;
264   bitmap_iterator bi;
265 
266   used = partition_view_init (map);
267   EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
268     {
269       p = partition_find (map->var_partition, x);
270       gcc_assert (bitmap_bit_p (used, p));
271       bitmap_set_bit (new_partitions, p);
272     }
273   partition_view_fini (map, new_partitions);
274 
275   var_map_base_fini (map);
276 }
277 
278 
/* Bitmap of DECL_UIDs of local variables referenced in the IL; allocated
   and freed in remove_unused_locals.  */
static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}
289 
/* Return true if VAR is marked as used (i.e. its DECL_UID is set in
   USEDVARS).  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}
297 
298 static inline void mark_all_vars_used (tree *);
299 
300 /* Helper function for mark_all_vars_used, called via walk_tree.  */
301 
302 static tree
mark_all_vars_used_1(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)303 mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
304 {
305   tree t = *tp;
306   enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
307   tree b;
308 
309   if (TREE_CODE (t) == SSA_NAME)
310     {
311       *walk_subtrees = 0;
312       t = SSA_NAME_VAR (t);
313       if (!t)
314 	return NULL;
315     }
316 
317   if (IS_EXPR_CODE_CLASS (c)
318       && (b = TREE_BLOCK (t)) != NULL)
319     TREE_USED (b) = true;
320 
321   /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
322      fields do not contain vars.  */
323   if (TREE_CODE (t) == TARGET_MEM_REF)
324     {
325       mark_all_vars_used (&TMR_BASE (t));
326       mark_all_vars_used (&TMR_INDEX (t));
327       mark_all_vars_used (&TMR_INDEX2 (t));
328       *walk_subtrees = 0;
329       return NULL;
330     }
331 
332   /* Only need to mark VAR_DECLS; parameters and return results are not
333      eliminated as unused.  */
334   if (VAR_P (t))
335     {
336       /* When a global var becomes used for the first time also walk its
337          initializer (non global ones don't have any).  */
338       if (set_is_used (t) && is_global_var (t)
339 	  && DECL_CONTEXT (t) == current_function_decl)
340 	mark_all_vars_used (&DECL_INITIAL (t));
341     }
342   /* remove_unused_scope_block_p requires information about labels
343      which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
344   else if (TREE_CODE (t) == LABEL_DECL)
345     /* Although the TREE_USED values that the frontend uses would be
346        acceptable (albeit slightly over-conservative) for our purposes,
347        init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
348        must re-compute it here.  */
349     TREE_USED (t) = 1;
350 
351   if (IS_TYPE_OR_DECL_P (t))
352     *walk_subtrees = 0;
353 
354   return NULL;
355 }
356 
357 /* Mark the scope block SCOPE and its subblocks unused when they can be
358    possibly eliminated if dead.  */
359 
360 static void
mark_scope_block_unused(tree scope)361 mark_scope_block_unused (tree scope)
362 {
363   tree t;
364   TREE_USED (scope) = false;
365   if (!(*debug_hooks->ignore_block) (scope))
366     TREE_USED (scope) = true;
367   for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
368     mark_scope_block_unused (t);
369 }
370 
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead,
	or there is precisely one subblock and the block
	has the same abstract origin as the outer block and declares
	no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.

   IN_CTOR_DTOR_BLOCK is true while walking inside a block belonging to an
   inlined constructor/destructor (see below).  */

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
	{
	  in_ctor_dtor_block = false;
	  unused = false;
	}
    }

  /* Walk BLOCK_VARS with a pointer-to-pointer so dead decls can be
     unlinked in place.  NEXT normally points at the chain slot of the
     current decl, but is redirected back to T when *T itself is removed.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs.  non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Recurse into subblocks; dead subblocks are either removed outright
     or replaced by their own (live) subblock chain, which is spliced into
     SCOPE with BLOCK_SUPERCONTEXT retargeted to SCOPE's context.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
	        BLOCK_SUPERCONTEXT (*t) = supercontext;
	        t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


   if (!unused)
     ;
   /* Outer scope is always used.  */
   else if (!BLOCK_SUPERCONTEXT (scope)
            || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
     unused = false;
   /* Innermost blocks with no live variables nor statements can be always
      eliminated.  */
   else if (!nsubblocks)
     ;
   /* When not generating debug info we can eliminate info on unused
      variables.  */
   else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
     {
       /* Even for -g0 don't prune outer scopes from artificial
	  functions, otherwise diagnostics using tree_nonartificial_location
	  will not be emitted properly.  */
       if (inlined_function_outer_scope_p (scope))
	 {
	   tree ao = scope;

	   while (ao
		  && TREE_CODE (ao) == BLOCK
		  && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	     ao = BLOCK_ABSTRACT_ORIGIN (ao);
	   if (ao
	       && TREE_CODE (ao) == FUNCTION_DECL
	       && DECL_DECLARED_INLINE_P (ao)
	       && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	     unused = false;
	 }
     }
   else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
     unused = false;
   /* See if this block is important for representation of inlined
      function.  Inlined functions are always represented by block
      with block_ultimate_origin being set to FUNCTION_DECL and
      DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
   else if (inlined_function_outer_scope_p (scope))
     unused = false;
   else
   /* Verify that only blocks with source location set
      are entry points to the inlined functions.  */
     gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		 == UNKNOWN_LOCATION);

   TREE_USED (scope) = !unused;
   return unused;
}
566 
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  As a side effect,
   the TREE_BLOCKs of the visited expressions are marked TREE_USED (see
   mark_all_vars_used_1).  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
575 
576 /* Helper function for clear_unused_block_pointer, called via walk_tree.  */
577 
578 static tree
clear_unused_block_pointer_1(tree * tp,int *,void *)579 clear_unused_block_pointer_1 (tree *tp, int *, void *)
580 {
581   if (EXPR_P (*tp) && TREE_BLOCK (*tp)
582       && !TREE_USED (TREE_BLOCK (*tp)))
583     TREE_SET_BLOCK (*tp, NULL);
584   return NULL_TREE;
585 }
586 
587 /* Set all block pointer in debug or clobber stmt to NULL if the block
588    is unused, so that they will not be streamed out.  */
589 
590 static void
clear_unused_block_pointer(void)591 clear_unused_block_pointer (void)
592 {
593   basic_block bb;
594   gimple_stmt_iterator gsi;
595 
596   FOR_EACH_BB_FN (bb, cfun)
597     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
598       {
599 	unsigned i;
600 	tree b;
601 	gimple *stmt = gsi_stmt (gsi);
602 
603 	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
604 	  continue;
605 	b = gimple_block (stmt);
606 	if (b && !TREE_USED (b))
607 	  gimple_set_block (stmt, NULL);
608 	for (i = 0; i < gimple_num_ops (stmt); i++)
609 	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
610 		     NULL, NULL);
611       }
612 }
613 
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.
   Recurses into subblocks with INDENT increased by 2.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  /* Header: block number plus used/abstract markers.  */
  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  /* A block is either a fragment of another block, or the head of a
     fragment chain -- never both.  */
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
	     BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t ;
	   t = BLOCK_FRAGMENT_CHAIN (t))
	fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  /* Variables declared directly in this block.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  /* Variables moved out of this block by optimization.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
      			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
671 
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  Intended for use from a debugger.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
680 
681 
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
690 
691 
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  Intended for use from a debugger.  */

DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  dump_scope_blocks (stderr, flags);
}
700 
701 /* Remove local variables that are not referenced in the IL.  */
702 
703 void
remove_unused_locals(void)704 remove_unused_locals (void)
705 {
706   basic_block bb;
707   tree var;
708   unsigned srcidx, dstidx, num;
709   bool have_local_clobbers = false;
710 
711   /* Removing declarations from lexical blocks when not optimizing is
712      not only a waste of time, it actually causes differences in stack
713      layout.  */
714   if (!optimize)
715     return;
716 
717   timevar_push (TV_REMOVE_UNUSED);
718 
719   mark_scope_block_unused (DECL_INITIAL (current_function_decl));
720 
721   usedvars = BITMAP_ALLOC (NULL);
722 
723   /* Walk the CFG marking all referenced symbols.  */
724   FOR_EACH_BB_FN (bb, cfun)
725     {
726       gimple_stmt_iterator gsi;
727       size_t i;
728       edge_iterator ei;
729       edge e;
730 
731       /* Walk the statements.  */
732       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
733 	{
734 	  gimple *stmt = gsi_stmt (gsi);
735 	  tree b = gimple_block (stmt);
736 
737 	  /* If we wanted to mark the block referenced by the inline
738 	     entry point marker as used, this would be a good spot to
739 	     do it.  If the block is not otherwise used, the stmt will
740 	     be cleaned up in clean_unused_block_pointer.  */
741 	  if (is_gimple_debug (stmt))
742 	    continue;
743 
744 	  if (gimple_clobber_p (stmt))
745 	    {
746 	      have_local_clobbers = true;
747 	      continue;
748 	    }
749 
750 	  if (b)
751 	    TREE_USED (b) = true;
752 
753 	  for (i = 0; i < gimple_num_ops (stmt); i++)
754 	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
755 	}
756 
757       for (gphi_iterator gpi = gsi_start_phis (bb);
758 	   !gsi_end_p (gpi);
759 	   gsi_next (&gpi))
760         {
761           use_operand_p arg_p;
762           ssa_op_iter i;
763 	  tree def;
764 	  gphi *phi = gpi.phi ();
765 
766 	  if (virtual_operand_p (gimple_phi_result (phi)))
767 	    continue;
768 
769 	  def = gimple_phi_result (phi);
770 	  mark_all_vars_used (&def);
771 
772           FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
773             {
774 	      tree arg = USE_FROM_PTR (arg_p);
775 	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
776 	      tree block =
777 		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
778 	      if (block != NULL)
779 		TREE_USED (block) = true;
780 	      mark_all_vars_used (&arg);
781             }
782         }
783 
784       FOR_EACH_EDGE (e, ei, bb->succs)
785 	if (LOCATION_BLOCK (e->goto_locus) != NULL)
786 	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
787     }
788 
789   /* We do a two-pass approach about the out-of-scope clobbers.  We want
790      to remove them if they are the only references to a local variable,
791      but we want to retain them when there's any other.  So the first pass
792      ignores them, and the second pass (if there were any) tries to remove
793      them.  */
794   if (have_local_clobbers)
795     FOR_EACH_BB_FN (bb, cfun)
796       {
797 	gimple_stmt_iterator gsi;
798 
799 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
800 	  {
801 	    gimple *stmt = gsi_stmt (gsi);
802 	    tree b = gimple_block (stmt);
803 
804 	    if (gimple_clobber_p (stmt))
805 	      {
806 		tree lhs = gimple_assign_lhs (stmt);
807 		tree base = get_base_address (lhs);
808 		/* Remove clobbers referencing unused vars, or clobbers
809 		   with MEM_REF lhs referencing uninitialized pointers.  */
810 		if ((VAR_P (base) && !is_used_p (base))
811 		    || (TREE_CODE (lhs) == MEM_REF
812 			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
813 			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
814 			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
815 			    != PARM_DECL)))
816 		  {
817 		    unlink_stmt_vdef (stmt);
818 		    gsi_remove (&gsi, true);
819 		    release_defs (stmt);
820 		    continue;
821 		  }
822 		if (b)
823 		  TREE_USED (b) = true;
824 	      }
825 	    gsi_next (&gsi);
826 	  }
827       }
828 
829   if (cfun->has_simduid_loops)
830     {
831       struct loop *loop;
832       FOR_EACH_LOOP (loop, 0)
833 	if (loop->simduid && !is_used_p (loop->simduid))
834 	  loop->simduid = NULL_TREE;
835     }
836 
837   cfun->has_local_explicit_reg_vars = false;
838 
839   /* Remove unmarked local and global vars from local_decls.  */
840   num = vec_safe_length (cfun->local_decls);
841   for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
842     {
843       var = (*cfun->local_decls)[srcidx];
844       if (VAR_P (var))
845 	{
846 	  if (!is_used_p (var))
847 	    {
848 	      tree def;
849 	      if (cfun->nonlocal_goto_save_area
850 		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
851 		cfun->nonlocal_goto_save_area = NULL;
852 	      /* Release any default def associated with var.  */
853 	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
854 		{
855 		  set_ssa_default_def (cfun, var, NULL_TREE);
856 		  release_ssa_name (def);
857 		}
858 	      continue;
859 	    }
860 	}
861       if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
862 	cfun->has_local_explicit_reg_vars = true;
863 
864       if (srcidx != dstidx)
865 	(*cfun->local_decls)[dstidx] = var;
866       dstidx++;
867     }
868   if (dstidx != num)
869     {
870       statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
871       cfun->local_decls->truncate (dstidx);
872     }
873 
874   remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
875 			       polymorphic_ctor_dtor_p (current_function_decl,
876 							true) != NULL_TREE);
877   clear_unused_block_pointer ();
878 
879   BITMAP_FREE (usedvars);
880 
881   if (dump_file && (dump_flags & TDF_DETAILS))
882     {
883       fprintf (dump_file, "Scope blocks after cleanups:\n");
884       dump_scope_blocks (dump_file, dump_flags);
885     }
886 
887   timevar_pop (TV_REMOVE_UNUSED);
888 }
889 
890 /* Allocate and return a new live range information object base on MAP.  */
891 
892 static tree_live_info_p
new_tree_live_info(var_map map)893 new_tree_live_info (var_map map)
894 {
895   tree_live_info_p live;
896   basic_block bb;
897 
898   live = XNEW (struct tree_live_info_d);
899   live->map = map;
900   live->num_blocks = last_basic_block_for_fn (cfun);
901 
902   bitmap_obstack_initialize (&live->livein_obstack);
903   bitmap_obstack_initialize (&live->liveout_obstack);
904   live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
905   FOR_EACH_BB_FN (bb, cfun)
906     bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
907 
908   live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
909   FOR_EACH_BB_FN (bb, cfun)
910     bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
911 
912   live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
913   live->stack_top = live->work_stack;
914 
915   live->global = BITMAP_ALLOC (NULL);
916   return live;
917 }
918 
919 
920 /* Free storage for live range info object LIVE.  */
921 
922 void
delete_tree_live_info(tree_live_info_p live)923 delete_tree_live_info (tree_live_info_p live)
924 {
925   if (live->livein)
926     {
927       bitmap_obstack_release (&live->livein_obstack);
928       free (live->livein);
929     }
930   if (live->liveout)
931     {
932       bitmap_obstack_release (&live->liveout_obstack);
933       free (live->liveout);
934     }
935   BITMAP_FREE (live->global);
936   free (live->work_stack);
937   free (live);
938 }
939 
940 
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

945 
946 static void
loe_visit_block(tree_live_info_p live,basic_block bb,sbitmap visited)947 loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
948 {
949   edge e;
950   bool change;
951   edge_iterator ei;
952   basic_block pred_bb;
953   bitmap loe;
954 
955   gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
956   bitmap_set_bit (visited, bb->index);
957 
958   loe = live_on_entry (live, bb);
959 
960   FOR_EACH_EDGE (e, ei, bb->preds)
961     {
962       pred_bb = e->src;
963       if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
964 	continue;
965       /* Variables live-on-entry from BB that aren't defined in the
966 	 predecessor block.  This should be the live on entry vars to pred.
967 	 Note that liveout is the DEFs in a block while live on entry is
968 	 being calculated.
969 	 Add these bits to live-on-entry for the pred. if there are any
970 	 changes, and pred_bb has been visited already, add it to the
971 	 revisit stack.  */
972       change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
973 					  loe, &live->liveout[pred_bb->index]);
974       if (change
975 	  && bitmap_bit_p (visited, pred_bb->index))
976 	{
977 	  bitmap_clear_bit (visited, pred_bb->index);
978 	  *(live->stack_top)++ = pred_bb->index;
979 	}
980     }
981 }
982 
983 
984 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
985    of all the variables.  */
986 
987 static void
live_worklist(tree_live_info_p live)988 live_worklist (tree_live_info_p live)
989 {
990   unsigned b;
991   basic_block bb;
992   auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
993 
994   bitmap_clear (visited);
995 
996   /* Visit all the blocks in reverse order and propagate live on entry values
997      into the predecessors blocks.  */
998   FOR_EACH_BB_REVERSE_FN (bb, cfun)
999     loe_visit_block (live, bb, visited);
1000 
1001   /* Process any blocks which require further iteration.  */
1002   while (live->stack_top != live->work_stack)
1003     {
1004       b = *--(live->stack_top);
1005       loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
1006     }
1007 }
1008 
1009 
1010 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use
1011    links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
1012    in the liveout vector.  */
1013 
static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  /* Nothing to record for names that were not assigned a partition.  */
  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  (calculate_live_on_exit
	 clears these again before computing the real liveout sets.)  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.
     Note the def was still recorded above so the worklist propagation
     stops at the defining block.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether its live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	/* Debug uses must not affect the liveness computation.  */
	continue;
      else
        {
	  /* If its not defined in this block, its live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1086 
1087 
1088 /* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */
1089 
1090 static void
calculate_live_on_exit(tree_live_info_p liveinfo)1091 calculate_live_on_exit (tree_live_info_p liveinfo)
1092 {
1093   basic_block bb;
1094   edge e;
1095   edge_iterator ei;
1096 
1097   /* live on entry calculations used liveout vectors for defs, clear them.  */
1098   FOR_EACH_BB_FN (bb, cfun)
1099     bitmap_clear (&liveinfo->liveout[bb->index]);
1100 
1101   /* Set all the live-on-exit bits for uses in PHIs.  */
1102   FOR_EACH_BB_FN (bb, cfun)
1103     {
1104       gphi_iterator gsi;
1105       size_t i;
1106 
1107       /* Mark the PHI arguments which are live on exit to the pred block.  */
1108       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1109 	{
1110 	  gphi *phi = gsi.phi ();
1111 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1112 	    {
1113 	      tree t = PHI_ARG_DEF (phi, i);
1114 	      int p;
1115 
1116 	      if (TREE_CODE (t) != SSA_NAME)
1117 		continue;
1118 
1119 	      p = var_to_partition (liveinfo->map, t);
1120 	      if (p == NO_PARTITION)
1121 		continue;
1122 	      e = gimple_phi_arg_edge (phi, i);
1123 	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1124 		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
1125 	    }
1126 	}
1127 
1128       /* Add each successors live on entry to this bock live on exit.  */
1129       FOR_EACH_EDGE (e, ei, bb->succs)
1130 	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1131 	  bitmap_ior_into (&liveinfo->liveout[bb->index],
1132 			   live_on_entry (liveinfo, e->dest));
1133     }
1134 }
1135 
1136 
1137 /* Given partition map MAP, calculate all the live on entry bitmaps for
1138    each partition.  Return a new live info object.  */
1139 
1140 tree_live_info_p
calculate_live_ranges(var_map map,bool want_livein)1141 calculate_live_ranges (var_map map, bool want_livein)
1142 {
1143   tree var;
1144   unsigned i;
1145   tree_live_info_p live;
1146 
1147   live = new_tree_live_info (map);
1148   for (i = 0; i < num_var_partitions (map); i++)
1149     {
1150       var = partition_to_var (map, i);
1151       if (var != NULL_TREE)
1152 	set_var_live_on_entry (var, live);
1153     }
1154 
1155   live_worklist (live);
1156 
1157   if (flag_checking)
1158     verify_live_on_entry (live);
1159 
1160   calculate_live_on_exit (live);
1161 
1162   if (!want_livein)
1163     {
1164       bitmap_obstack_release (&live->livein_obstack);
1165       free (live->livein);
1166       live->livein = NULL;
1167     }
1168 
1169   return live;
1170 }
1171 
1172 
1173 /* Output partition map MAP to file F.  */
1174 
1175 void
dump_var_map(FILE * f,var_map map)1176 dump_var_map (FILE *f, var_map map)
1177 {
1178   int t;
1179   unsigned x, y;
1180   int p;
1181 
1182   fprintf (f, "\nPartition map \n\n");
1183 
1184   for (x = 0; x < map->num_partitions; x++)
1185     {
1186       if (map->view_to_partition != NULL)
1187 	p = map->view_to_partition[x];
1188       else
1189 	p = x;
1190 
1191       if (ssa_name (p) == NULL_TREE
1192 	  || virtual_operand_p (ssa_name (p)))
1193         continue;
1194 
1195       t = 0;
1196       for (y = 1; y < num_ssa_names; y++)
1197         {
1198 	  p = partition_find (map->var_partition, y);
1199 	  if (map->partition_to_view)
1200 	    p = map->partition_to_view[p];
1201 	  if (p == (int)x)
1202 	    {
1203 	      if (t++ == 0)
1204 	        {
1205 		  fprintf (f, "Partition %d (", x);
1206 		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1207 		  fprintf (f, " - ");
1208 		}
1209 	      fprintf (f, "%d ", y);
1210 	    }
1211 	}
1212       if (t != 0)
1213 	fprintf (f, ")\n");
1214     }
1215   fprintf (f, "\n");
1216 }
1217 
1218 
1219 /* Generic dump for the above.  */
1220 
DEBUG_FUNCTION void
debug (_var_map &ref)
{
  /* Debugger convenience: dump the partition map REF to stderr.  */
  dump_var_map (stderr, &ref);
}
1226 
1227 DEBUG_FUNCTION void
debug(_var_map * ptr)1228 debug (_var_map *ptr)
1229 {
1230   if (ptr)
1231     debug (*ptr);
1232   else
1233     fprintf (stderr, "<nil>\n");
1234 }
1235 
1236 
1237 /* Output live range info LIVE to file F, controlled by FLAG.  */
1238 
1239 void
dump_live_info(FILE * f,tree_live_info_p live,int flag)1240 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1241 {
1242   basic_block bb;
1243   unsigned i;
1244   var_map map = live->map;
1245   bitmap_iterator bi;
1246 
1247   if ((flag & LIVEDUMP_ENTRY) && live->livein)
1248     {
1249       FOR_EACH_BB_FN (bb, cfun)
1250 	{
1251 	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1252 	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
1253 	    {
1254 	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1255 	      fprintf (f, "  ");
1256 	    }
1257 	  fprintf (f, "\n");
1258 	}
1259     }
1260 
1261   if ((flag & LIVEDUMP_EXIT) && live->liveout)
1262     {
1263       FOR_EACH_BB_FN (bb, cfun)
1264 	{
1265 	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1266 	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
1267 	    {
1268 	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1269 	      fprintf (f, "  ");
1270 	    }
1271 	  fprintf (f, "\n");
1272 	}
1273     }
1274 }
1275 
1276 
1277 /* Generic dump for the above.  */
1278 
DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  /* Debugger convenience: dump the live range info REF to stderr.
     Flag 0 selects neither LIVEDUMP_ENTRY nor LIVEDUMP_EXIT detail.  */
  dump_live_info (stderr, &ref, 0);
}
1284 
1285 DEBUG_FUNCTION void
debug(tree_live_info_d * ptr)1286 debug (tree_live_info_d *ptr)
1287 {
1288   if (ptr)
1289     debug (*ptr);
1290   else
1291     fprintf (stderr, "<nil>\n");
1292 }
1293 
1294 
1295 /* Verify that the info in LIVE matches the current cfg.  */
1296 
static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

   /* Check for live on entry partitions and report those with a DEF in
      the program. This will typically mean an optimization has done
      something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is the default definition of VAR's underlying decl,
	     if there is one.  */
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* VAR is live on entry to the function, which is only
		 consistent if its definition is a default def
		 (a GIMPLE_NOP).  A real defining statement here means
		 VAR effectively has multiple definitions.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
	        {
		  /* A default def that is live on entry must be THE
		     default def of its decl.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
		        {
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* Expand adds unused default defs for PARM_DECLs and
		   RESULT_DECLs.  They're ok.  */
		if (has_zero_uses (var)
		    && SSA_NAME_VAR (var)
		    && !VAR_P (SSA_NAME_VAR (var)))
		  continue;
	        num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* NUM only ever increases, so this effectively asserts that no
     inconsistencies were diagnosed above.  */
  gcc_assert (num <= 0);
}
1408