1 /* Calculate branch probabilities, and basic block execution counts.
2    Copyright (C) 1990-2018 Free Software Foundation, Inc.
3    Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4    based on some ideas from Dain Samples of UC Berkeley.
5    Further mangling by Bob Manson, Cygnus Support.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* Generate basic block profile instrumentation and auxiliary files.
24    Profile generation is optimized, so that not all arcs in the basic
25    block graph need instrumenting. First, the BB graph is closed with
26    one entry (function start), and one exit (function exit).  Any
27    ABNORMAL_EDGE cannot be instrumented (because there is no control
28    path to place the code). We close the graph by inserting fake
29    EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
30    edges that do not go to the exit_block. We ignore such abnormal
31    edges.  Naturally these fake edges are never directly traversed,
32    and so *cannot* be directly instrumented.  Some other graph
33    massaging is done.  To optimize the instrumentation we generate the
34    minimal spanning tree of the BB graph; only edges that are not on the
35    spanning tree (plus the entry point) need instrumenting.  From that information
36    all other edge counts can be deduced.  By construction all fake
37    edges must be on the spanning tree. We also attempt to place
38    EDGE_CRITICAL edges on the spanning tree.
39 
40    The auxiliary files generated are <dumpbase>.gcno (at compile time)
41    and <dumpbase>.gcda (at run time).  The format is
42    described in full in gcov-io.h.  */
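/* As a small worked example of the scheme above (blocks A-D are purely
   illustrative): for a diamond

	ENTRY -> A, A -> B, A -> C, B -> D, C -> D, D -> EXIT

   a spanning tree over the six blocks contains five of those six edges, so
   only the remaining edge, say A -> C, gets a counter (plus the function
   entry count).  Flow conservation recovers the rest:
   count(A->B) = count(ENTRY) - count(A->C), count(C->D) = count(A->C),
   count(B->D) = count(A->B), and count(D->EXIT) is their sum.  */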
43 
44 /* ??? Register allocation should use basic block execution counts to
45    give preference to the most commonly executed blocks.  */
46 
47 /* ??? Should calculate branch probabilities before instrumenting code, since
48    then we can use arc counts to help decide which arcs to instrument.  */
49 
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "backend.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "gimple.h"
57 #include "cfghooks.h"
58 #include "cgraph.h"
59 #include "coverage.h"
60 #include "diagnostic-core.h"
61 #include "cfganal.h"
62 #include "value-prof.h"
63 #include "gimple-iterator.h"
64 #include "tree-cfg.h"
65 #include "dumpfile.h"
66 #include "cfgloop.h"
67 
68 #include "profile.h"
69 
70 /* Map from BBs/edges to gcov counters.  */
71 vec<gcov_type> bb_gcov_counts;
72 hash_map<edge,gcov_type> *edge_gcov_counts;
73 
74 struct bb_profile_info {
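  /* Nonzero once the execution count of the block has been determined.  */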
75   unsigned int count_valid : 1;
76 
77   /* Number of successor and predecessor edges.  */
78   gcov_type succ_count;
79   gcov_type pred_count;
80 };
81 
82 #define BB_INFO(b)  ((struct bb_profile_info *) (b)->aux)
83 
84 
85 /* Counter summary from the last set of coverage counts read.  */
86 
87 const struct gcov_ctr_summary *profile_info;
88 
89 /* Counter working set information computed from the current counter
90    summary. Not initialized unless profile_info summary is non-NULL.  */
91 static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];
92 
93 /* Collect statistics on the performance of this pass for the entire source
94    file.  */
95 
96 static int total_num_blocks;
97 static int total_num_edges;
98 static int total_num_edges_ignored;
99 static int total_num_edges_instrumented;
100 static int total_num_blocks_created;
101 static int total_num_passes;
102 static int total_num_times_called;
103 static int total_hist_br_prob[20];
104 static int total_num_branches;
105 
106 /* Helper function to update gcov_working_sets.  */
107 
108 void
add_working_set (gcov_working_set_t *set)
{
109   int i;
110   for (i = 0; i < NUM_GCOV_WORKING_SETS; i++)
111     gcov_working_sets[i] = set[i];
112 }
113 
114 /* Forward declarations.  */
115 static void find_spanning_tree (struct edge_list *);
116 
117 /* Add edge instrumentation code to the current function.
118 
119    EL is the list of all edges; a counter is added for each edge that is
120    neither ignored nor on the spanning tree.  Return the number instrumented.  */
121 
122 static unsigned
123 instrument_edges (struct edge_list *el)
124 {
125   unsigned num_instr_edges = 0;
126   int num_edges = NUM_EDGES (el);
127   basic_block bb;
128 
129   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
130     {
131       edge e;
132       edge_iterator ei;
133 
134       FOR_EACH_EDGE (e, ei, bb->succs)
135 	{
136 	  struct edge_profile_info *inf = EDGE_INFO (e);
137 
138 	  if (!inf->ignore && !inf->on_tree)
139 	    {
140 	      gcc_assert (!(e->flags & EDGE_ABNORMAL));
141 	      if (dump_file)
142 		fprintf (dump_file, "Edge %d to %d instrumented%s\n",
143 			 e->src->index, e->dest->index,
144 			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
145 	      gimple_gen_edge_profiler (num_instr_edges++, e);
146 	    }
147 	}
148     }
149 
150   total_num_blocks_created += num_edges;
151   if (dump_file)
152     fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
153   return num_instr_edges;
154 }
155 
156 /* Add code to measure histograms for values in list VALUES.  */
157 static void
158 instrument_values (histogram_values values)
159 {
160   unsigned i;
161 
162   /* Emit code to generate the histograms before the insns.  */
163 
164   for (i = 0; i < values.length (); i++)
165     {
166       histogram_value hist = values[i];
167       unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
168 
169       if (!coverage_counter_alloc (t, hist->n_counters))
170 	continue;
171 
172       switch (hist->type)
173 	{
174 	case HIST_TYPE_INTERVAL:
175 	  gimple_gen_interval_profiler (hist, t, 0);
176 	  break;
177 
178 	case HIST_TYPE_POW2:
179 	  gimple_gen_pow2_profiler (hist, t, 0);
180 	  break;
181 
182 	case HIST_TYPE_SINGLE_VALUE:
183 	  gimple_gen_one_value_profiler (hist, t, 0);
184 	  break;
185 
186 	case HIST_TYPE_INDIR_CALL:
187 	case HIST_TYPE_INDIR_CALL_TOPN:
188 	  gimple_gen_ic_profiler (hist, t, 0);
189 	  break;
190 
191 	case HIST_TYPE_AVERAGE:
192 	  gimple_gen_average_profiler (hist, t, 0);
193 	  break;
194 
195 	case HIST_TYPE_IOR:
196 	  gimple_gen_ior_profiler (hist, t, 0);
197 	  break;
198 
199 	case HIST_TYPE_TIME_PROFILE:
200 	  gimple_gen_time_profiler (t, 0);
201 	  break;
202 
203 	default:
204 	  gcc_unreachable ();
205 	}
206     }
207 }
208 
209 
210 /* Fill the working set information into the profile_info structure.  */
211 
212 void
213 get_working_sets (void)
214 {
215   unsigned ws_ix, pctinc, pct;
216   gcov_working_set_t *ws_info;
217 
218   if (!profile_info)
219     return;
220 
221   compute_working_sets (profile_info, gcov_working_sets);
222 
223   if (dump_file)
224     {
225       fprintf (dump_file, "Counter working sets:\n");
226       /* Multiply the percentage by 100 to avoid float.  */
227       pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
228       for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
229            ws_ix++, pct += pctinc)
230         {
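          /* The last working set boundary is 99.9% rather than 100%.  */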
231           if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
232             pct = 9990;
233           ws_info = &gcov_working_sets[ws_ix];
234           /* Print out the percentage using integer arithmetic to avoid float.  */
235           fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
236                    "%" PRId64 "\n",
237                    pct / 100, pct - (pct / 100 * 100),
238                    ws_info->num_counters,
239                    (int64_t)ws_info->min_counter);
240         }
241     }
242 }
243 
244 /* Given the desired percentage of the full profile (sum_all from the
245    summary), multiplied by 10 to avoid float in PCT_TIMES_10, return
246    the corresponding working set information.  If there is no exact match
247    for the percentage, the closest value is used.  */
248 
249 gcov_working_set_t *
250 find_working_set (unsigned pct_times_10)
251 {
252   unsigned i;
253   if (!profile_info)
254     return NULL;
255   gcc_assert (pct_times_10 <= 1000);
256   if (pct_times_10 >= 999)
257     return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
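  /* Working set I - 1 covers roughly I * 100 / NUM_GCOV_WORKING_SETS percent
     of sum_all, so scale the request accordingly; e.g. with 128 working sets
     a request for 50.0% (PCT_TIMES_10 == 500) maps to entry 63.  */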
258   i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
259   if (!i)
260     return &gcov_working_sets[0];
261   return &gcov_working_sets[i - 1];
262 }
263 
264 /* Compute the hybrid profile for all matching entries in the .gcda file.
265 
266    CFG_CHECKSUM is the precomputed checksum for the CFG.  */
267 
268 static gcov_type *
269 get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
270 {
271   unsigned num_edges = 0;
272   basic_block bb;
273   gcov_type *counts;
274 
275   /* Count the edges to be (possibly) instrumented.  */
276   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
277     {
278       edge e;
279       edge_iterator ei;
280 
281       FOR_EACH_EDGE (e, ei, bb->succs)
282 	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
283 	  num_edges++;
284     }
285 
286   counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
287 				lineno_checksum, &profile_info);
288   if (!counts)
289     return NULL;
290 
291   get_working_sets ();
292 
293   if (dump_file && profile_info)
294     fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
295 	     profile_info->runs, (unsigned) profile_info->sum_max);
296 
297   return counts;
298 }
299 
300 
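/* Return true if some non-ignored edge in EDGES has a negative execution
   count that cannot be explained by a fake edge after a call (functions
   such as setjmp or fork may return more than once).  */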
301 static bool
302 is_edge_inconsistent (vec<edge, va_gc> *edges)
303 {
304   edge e;
305   edge_iterator ei;
306   FOR_EACH_EDGE (e, ei, edges)
307     {
308       if (!EDGE_INFO (e)->ignore)
309         {
310           if (edge_gcov_count (e) < 0
311 	      && (!(e->flags & EDGE_FAKE)
312 	          || !block_ends_with_call_p (e->src)))
313 	    {
314 	      if (dump_file)
315 		{
316 		  fprintf (dump_file,
317 		  	   "Edge %i->%i is inconsistent, count%" PRId64,
318 			   e->src->index, e->dest->index, edge_gcov_count (e));
319 		  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
320 		  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
321 		}
322               return true;
323 	    }
324         }
325     }
326   return false;
327 }
328 
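/* Reset any negative edge counts to zero; used when repairing an
   inconsistent profile before block counts are re-derived.  */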
329 static void
330 correct_negative_edge_counts (void)
331 {
332   basic_block bb;
333   edge e;
334   edge_iterator ei;
335 
336   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
337     {
338       FOR_EACH_EDGE (e, ei, bb->succs)
339         {
340            if (edge_gcov_count (e) < 0)
341              edge_gcov_count (e) = 0;
342         }
343     }
344 }
345 
346 /* Check consistency.
347    Return true if inconsistency is found.  */
348 static bool
349 is_inconsistent (void)
350 {
351   basic_block bb;
352   bool inconsistent = false;
353   FOR_EACH_BB_FN (bb, cfun)
354     {
355       inconsistent |= is_edge_inconsistent (bb->preds);
356       if (!dump_file && inconsistent)
357 	return true;
358       inconsistent |= is_edge_inconsistent (bb->succs);
359       if (!dump_file && inconsistent)
360 	return true;
361       if (bb_gcov_count (bb) < 0)
362         {
363 	  if (dump_file)
364 	    {
365 	      fprintf (dump_file, "BB %i count is negative "
366 		       "%" PRId64,
367 		       bb->index,
368 		       bb_gcov_count (bb));
369 	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
370 	    }
371 	  inconsistent = true;
372 	}
373       if (bb_gcov_count (bb) != sum_edge_counts (bb->preds))
374         {
375 	  if (dump_file)
376 	    {
377 	      fprintf (dump_file, "BB %i count does not match sum of incoming edges "
378 		       "%" PRId64" should be %" PRId64,
379 		       bb->index,
380 		       bb_gcov_count (bb),
381 		       sum_edge_counts (bb->preds));
382 	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
383 	    }
384 	  inconsistent = true;
385 	}
386       if (bb_gcov_count (bb) != sum_edge_counts (bb->succs)
387 	  && ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
388 		&& block_ends_with_call_p (bb)))
389 	{
390 	  if (dump_file)
391 	    {
392 	      fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
393 		       "%" PRId64" should be %" PRId64,
394 		       bb->index,
395 		       bb_gcov_count (bb),
396 		       sum_edge_counts (bb->succs));
397 	      dump_bb (dump_file, bb, 0, TDF_DETAILS);
398 	    }
399 	  inconsistent = true;
400 	}
401       if (!dump_file && inconsistent)
402 	return true;
403     }
404 
405   return inconsistent;
406 }
407 
408 /* Set each basic block count to the sum of its outgoing edge counts.  */
409 static void
410 set_bb_counts (void)
411 {
412   basic_block bb;
413   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
414     {
415       bb_gcov_count (bb) = sum_edge_counts (bb->succs);
416       gcc_assert (bb_gcov_count (bb) >= 0);
417     }
418 }
419 
420 /* Read profile data and return the total number of edge counts read.  */
421 static int
422 read_profile_edge_counts (gcov_type *exec_counts)
423 {
424   basic_block bb;
425   int num_edges = 0;
426   int exec_counts_pos = 0;
427   /* For each edge not on the spanning tree, set its execution count from
428      the .gcda file.  */
429   /* The first count in the .gcda file is the number of times that the function
430      was entered.  This is the exec_count for block zero.  */
431 
432   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
433     {
434       edge e;
435       edge_iterator ei;
436 
437       FOR_EACH_EDGE (e, ei, bb->succs)
438 	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
439 	  {
440 	    num_edges++;
441 	    if (exec_counts)
442 	      {
443 		edge_gcov_count (e) = exec_counts[exec_counts_pos++];
444 		if (edge_gcov_count (e) > profile_info->sum_max)
445 		  {
446 		    if (flag_profile_correction)
447 		      {
448 			static bool informed = 0;
449 			if (dump_enabled_p () && !informed)
450 		          dump_printf_loc (MSG_NOTE, input_location,
451                                            "corrupted profile info: edge count"
452                                            " exceeds maximal count\n");
453 			informed = 1;
454 		      }
455 		    else
456 		      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
457 			     bb->index, e->dest->index);
458 		  }
459 	      }
460 	    else
461 	      edge_gcov_count (e) = 0;
462 
463 	    EDGE_INFO (e)->count_valid = 1;
464 	    BB_INFO (bb)->succ_count--;
465 	    BB_INFO (e->dest)->pred_count--;
466 	    if (dump_file)
467 	      {
468 		fprintf (dump_file, "\nRead edge from %i to %i, count:",
469 			 bb->index, e->dest->index);
470 		fprintf (dump_file, "%" PRId64,
471 			 (int64_t) edge_gcov_count (e));
472 	      }
473 	  }
474     }
475 
476     return num_edges;
477 }
478 
479 
480 /* Compute the branch probabilities for the various branches.
481    Annotate them accordingly.
482 
483    CFG_CHECKSUM is the precomputed checksum for the CFG.  */
484 
485 static void
486 compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
487 {
488   basic_block bb;
489   int i;
490   int num_edges = 0;
491   int changes;
492   int passes;
493   int hist_br_prob[20];
494   int num_branches;
495   gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
496   int inconsistent = 0;
497 
498   /* Very simple sanity checks so we catch bugs in our profiling code.  */
499   if (!profile_info)
500     {
501       if (dump_file)
502 	fprintf (dump_file, "Profile info is missing; giving up\n");
503       return;
504     }
505 
506   bb_gcov_counts.safe_grow_cleared (last_basic_block_for_fn (cfun));
507   edge_gcov_counts = new hash_map<edge,gcov_type>;
508 
509   if (profile_info->sum_all < profile_info->sum_max)
510     {
511       error ("corrupted profile info: sum_all is smaller than sum_max");
512       exec_counts = NULL;
513     }
514 
515   /* Attach extra info block to each bb.  */
516   alloc_aux_for_blocks (sizeof (struct bb_profile_info));
517   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
518     {
519       edge e;
520       edge_iterator ei;
521 
522       FOR_EACH_EDGE (e, ei, bb->succs)
523 	if (!EDGE_INFO (e)->ignore)
524 	  BB_INFO (bb)->succ_count++;
525       FOR_EACH_EDGE (e, ei, bb->preds)
526 	if (!EDGE_INFO (e)->ignore)
527 	  BB_INFO (bb)->pred_count++;
528     }
529 
530   /* Pretend two unknown edges remain on the EXIT node's successor side and
       the ENTRY node's predecessor side, so the solver below never derives
       counts from those (empty) edge lists.  */
531   BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
532   BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
533 
534   num_edges = read_profile_edge_counts (exec_counts);
535 
536   if (dump_file)
537     fprintf (dump_file, "\n%d edge counts read\n", num_edges);
538 
539   /* For every block in the file,
540      - if every exit/entrance edge has a known count, then set the block count
541      - if the block count is known, and every exit/entrance edge but one has
542      a known execution count, then set the count of the remaining edge
543 
544      As edge counts are set, decrement the succ/pred count, but don't delete
545      the edge; that way we can easily tell when all edges are known, or only
546      one edge is unknown.  */
547 
548   /* The order that the basic blocks are iterated through is important.
549      Since the code that finds spanning trees starts with block 0, low numbered
550      edges are put on the spanning tree in preference to high numbered edges.
551      Hence, most instrumented edges are at the end.  Graph solving works much
552      faster if we propagate numbers from the end to the start.
553 
554      This takes an average of slightly more than 3 passes.  */
555 
556   changes = 1;
557   passes = 0;
558   while (changes)
559     {
560       passes++;
561       changes = 0;
562       FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
563 	{
564 	  struct bb_profile_info *bi = BB_INFO (bb);
565 	  if (! bi->count_valid)
566 	    {
567 	      if (bi->succ_count == 0)
568 		{
569 		  edge e;
570 		  edge_iterator ei;
571 		  gcov_type total = 0;
572 
573 		  FOR_EACH_EDGE (e, ei, bb->succs)
574 		    total += edge_gcov_count (e);
575 		  bb_gcov_count (bb) = total;
576 		  bi->count_valid = 1;
577 		  changes = 1;
578 		}
579 	      else if (bi->pred_count == 0)
580 		{
581 		  edge e;
582 		  edge_iterator ei;
583 		  gcov_type total = 0;
584 
585 		  FOR_EACH_EDGE (e, ei, bb->preds)
586 		    total += edge_gcov_count (e);
587 		  bb_gcov_count (bb) = total;
588 		  bi->count_valid = 1;
589 		  changes = 1;
590 		}
591 	    }
592 	  if (bi->count_valid)
593 	    {
594 	      if (bi->succ_count == 1)
595 		{
596 		  edge e;
597 		  edge_iterator ei;
598 		  gcov_type total = 0;
599 
600 		  /* One of the counts will be invalid, but it is zero,
601 		     so adding it in also doesn't hurt.  */
602 		  FOR_EACH_EDGE (e, ei, bb->succs)
603 		    total += edge_gcov_count (e);
604 
605 		  /* Search for the invalid edge, and set its count.  */
606 		  FOR_EACH_EDGE (e, ei, bb->succs)
607 		    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
608 		      break;
609 
610 		  /* Calculate count for remaining edge by conservation.  */
611 		  total = bb_gcov_count (bb) - total;
612 
613 		  gcc_assert (e);
614 		  EDGE_INFO (e)->count_valid = 1;
615 		  edge_gcov_count (e) = total;
616 		  bi->succ_count--;
617 
618 		  BB_INFO (e->dest)->pred_count--;
619 		  changes = 1;
620 		}
621 	      if (bi->pred_count == 1)
622 		{
623 		  edge e;
624 		  edge_iterator ei;
625 		  gcov_type total = 0;
626 
627 		  /* One of the counts will be invalid, but it is zero,
628 		     so adding it in also doesn't hurt.  */
629 		  FOR_EACH_EDGE (e, ei, bb->preds)
630 		    total += edge_gcov_count (e);
631 
632 		  /* Search for the invalid edge, and set its count.  */
633 		  FOR_EACH_EDGE (e, ei, bb->preds)
634 		    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
635 		      break;
636 
637 		  /* Calculate count for remaining edge by conservation.  */
638 		  total = bb_gcov_count (bb) - total + edge_gcov_count (e);
639 
640 		  gcc_assert (e);
641 		  EDGE_INFO (e)->count_valid = 1;
642 		  edge_gcov_count (e) = total;
643 		  bi->pred_count--;
644 
645 		  BB_INFO (e->src)->succ_count--;
646 		  changes = 1;
647 		}
648 	    }
649 	}
650     }
651 
652   total_num_passes += passes;
653   if (dump_file)
654     fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
655 
656   /* If the graph has been correctly solved, every block will have a
657      succ and pred count of zero.  */
658   FOR_EACH_BB_FN (bb, cfun)
659     {
660       gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
661     }
662 
663   /* Check for inconsistent basic block counts */
664   inconsistent = is_inconsistent ();
665 
666   if (inconsistent)
667    {
668      if (flag_profile_correction)
669        {
670          /* Inconsistency detected. Make it flow-consistent. */
671          static int informed = 0;
672          if (dump_enabled_p () && informed == 0)
673            {
674              informed = 1;
675              dump_printf_loc (MSG_NOTE, input_location,
676                               "correcting inconsistent profile data\n");
677            }
678          correct_negative_edge_counts ();
679          /* Set bb counts to the sum of the outgoing edge counts */
680          set_bb_counts ();
681          if (dump_file)
682            fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
683          mcf_smooth_cfg ();
684        }
685      else
686        error ("corrupted profile info: profile data is not flow-consistent");
687    }
688 
689   /* For every edge, calculate its branch probability from the block and
690      edge counts computed above.  */
691 
692   for (i = 0; i < 20; i++)
693     hist_br_prob[i] = 0;
694   num_branches = 0;
695 
696   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
697     {
698       edge e;
699       edge_iterator ei;
700 
701       if (bb_gcov_count (bb) < 0)
702 	{
703 	  error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
704 		 bb->index, (int)bb_gcov_count (bb));
705 	  bb_gcov_count (bb) = 0;
706 	}
707       FOR_EACH_EDGE (e, ei, bb->succs)
708 	{
709 	  /* A function may return twice in case the called function is
710 	     setjmp or calls fork, but we can't represent this by an extra
711 	     edge from the entry, since an extra edge from the exit is
712 	     already present.  We get a negative frequency from the entry
713 	     point.  */
714 	  if ((edge_gcov_count (e) < 0
715 	       && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
716 	      || (edge_gcov_count (e) > bb_gcov_count (bb)
717 		  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
718 	    {
719 	      if (block_ends_with_call_p (bb))
720 		edge_gcov_count (e) = edge_gcov_count (e) < 0
721 				      ? 0 : bb_gcov_count (bb);
722 	    }
723 	  if (edge_gcov_count (e) < 0
724 	      || edge_gcov_count (e) > bb_gcov_count (bb))
725 	    {
726 	      error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
727 		     e->src->index, e->dest->index,
728 		     (int)edge_gcov_count (e));
729 	      edge_gcov_count (e) = bb_gcov_count (bb) / 2;
730 	    }
731 	}
732       if (bb_gcov_count (bb))
733 	{
734 	  FOR_EACH_EDGE (e, ei, bb->succs)
735 	    e->probability = profile_probability::probability_in_gcov_type
736 		(edge_gcov_count (e), bb_gcov_count (bb));
737 	  if (bb->index >= NUM_FIXED_BLOCKS
738 	      && block_ends_with_condjump_p (bb)
739 	      && EDGE_COUNT (bb->succs) >= 2)
740 	    {
741 	      int prob;
742 	      edge e;
743 	      int index;
744 
745 	      /* Find the branch edge.  It is possible that we do have fake
746 		 edges here.  */
747 	      FOR_EACH_EDGE (e, ei, bb->succs)
748 		if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
749 		  break;
750 
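	      /* Bucket the probability into one of twenty 5% histogram bins
		 for the statistics dumped at the end of the pass.  */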
751 	      prob = e->probability.to_reg_br_prob_base ();
752 	      index = prob * 20 / REG_BR_PROB_BASE;
753 
754 	      if (index == 20)
755 		index = 19;
756 	      hist_br_prob[index]++;
757 
758 	      num_branches++;
759 	    }
760 	}
761       /* As a last resort, distribute the probabilities evenly.
762 	 Use the simple heuristic that if there are normal edges,
763 	 give all abnormal edges a frequency of 0; otherwise distribute
764 	 the frequency over the abnormal edges (this is the case of
765 	 noreturn calls).  */
766       else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
767 	{
768 	  int total = 0;
769 
770 	  FOR_EACH_EDGE (e, ei, bb->succs)
771 	    if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
772 	      total ++;
773 	  if (total)
774 	    {
775 	      FOR_EACH_EDGE (e, ei, bb->succs)
776 		if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
777 		  e->probability
778 		    = profile_probability::guessed_always ().apply_scale (1, total);
779 		else
780 		  e->probability = profile_probability::never ();
781 	    }
782 	  else
783 	    {
784 	      total += EDGE_COUNT (bb->succs);
785 	      FOR_EACH_EDGE (e, ei, bb->succs)
786 		e->probability
787 		 = profile_probability::guessed_always ().apply_scale (1, total);
788 	    }
789 	  if (bb->index >= NUM_FIXED_BLOCKS
790 	      && block_ends_with_condjump_p (bb)
791 	      && EDGE_COUNT (bb->succs) >= 2)
792 	    num_branches++;
793 	}
794     }
795 
796   /* If we have real data, use them!  */
797   if (bb_gcov_count (ENTRY_BLOCK_PTR_FOR_FN (cfun))
798       || !flag_guess_branch_prob)
799     FOR_ALL_BB_FN (bb, cfun)
800       bb->count = profile_count::from_gcov_type (bb_gcov_count (bb));
801   /* If function was not trained, preserve local estimates including statically
802      determined zero counts.  */
803   else
804     FOR_ALL_BB_FN (bb, cfun)
805       if (!(bb->count == profile_count::zero ()))
806         bb->count = bb->count.global0 ();
807 
808   bb_gcov_counts.release ();
809   delete edge_gcov_counts;
810   edge_gcov_counts = NULL;
811 
812   update_max_bb_count ();
813 
814   if (dump_file)
815     {
816       fprintf (dump_file, "%d branches\n", num_branches);
817       if (num_branches)
818 	for (i = 0; i < 10; i++)
819 	  fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
820 		   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
821 		   5 * i, 5 * i + 5);
822 
823       total_num_branches += num_branches;
824       for (i = 0; i < 20; i++)
825 	total_hist_br_prob[i] += hist_br_prob[i];
826 
827       fputc ('\n', dump_file);
828       fputc ('\n', dump_file);
829     }
830 
831   free_aux_for_blocks ();
832 }
833 
834 /* Load the value histograms whose descriptions are stored in the VALUES
835    array from the .gcda file.
836 
837    CFG_CHECKSUM is the precomputed checksum for the CFG.  */
838 
839 static void
840 compute_value_histograms (histogram_values values, unsigned cfg_checksum,
841                           unsigned lineno_checksum)
842 {
843   unsigned i, j, t, any;
844   unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
845   gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
846   gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
847   gcov_type *aact_count;
848   struct cgraph_node *node;
849 
850   for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
851     n_histogram_counters[t] = 0;
852 
853   for (i = 0; i < values.length (); i++)
854     {
855       histogram_value hist = values[i];
856       n_histogram_counters[(int) hist->type] += hist->n_counters;
857     }
858 
859   any = 0;
860   for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
861     {
862       if (!n_histogram_counters[t])
863 	{
864 	  histogram_counts[t] = NULL;
865 	  continue;
866 	}
867 
868       histogram_counts[t] =
869 	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
870 			     n_histogram_counters[t], cfg_checksum,
871 			     lineno_checksum, NULL);
872       if (histogram_counts[t])
873 	any = 1;
874       act_count[t] = histogram_counts[t];
875     }
876   if (!any)
877     return;
878 
879   for (i = 0; i < values.length (); i++)
880     {
881       histogram_value hist = values[i];
882       gimple *stmt = hist->hvalue.stmt;
883 
884       t = (int) hist->type;
885 
886       aact_count = act_count[t];
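      /* Remember where this histogram's counters start, and advance the
	 per-type cursor past them for the next histogram of this type.  */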
887 
888       if (act_count[t])
889         act_count[t] += hist->n_counters;
890 
891       gimple_add_histogram_value (cfun, stmt, hist);
892       hist->hvalue.counters =  XNEWVEC (gcov_type, hist->n_counters);
893       for (j = 0; j < hist->n_counters; j++)
894         if (aact_count)
895           hist->hvalue.counters[j] = aact_count[j];
896         else
897           hist->hvalue.counters[j] = 0;
898 
899       /* The time profiler counter is not related to any statement, so we
900          have to read the counter and set the value on the corresponding
901          call graph node.  */
902       if (hist->type == HIST_TYPE_TIME_PROFILE)
903         {
904 	  node = cgraph_node::get (hist->fun->decl);
905 	  node->tp_first_run = hist->hvalue.counters[0];
906 
907           if (dump_file)
908             fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
909         }
910     }
911 
912   for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
913     free (histogram_counts[t]);
914 }
915 
916 /* When passed NULL as FILE_NAME, reset the static state.
917    Otherwise output the records needed to switch to FILE_NAME and LINE in
918    BB's GCOV_TAG_LINES block, opening the block at *OFFSET if necessary.  */
919 static void
920 output_location (char const *file_name, int line,
921 		 gcov_position_t *offset, basic_block bb)
922 {
923   static char const *prev_file_name;
924   static int prev_line;
925   bool name_differs, line_differs;
926 
927   if (!file_name)
928     {
929       prev_file_name = NULL;
930       prev_line = -1;
931       return;
932     }
933 
934   name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
935   line_differs = prev_line != line;
936 
937   if (!*offset)
938     {
939       *offset = gcov_write_tag (GCOV_TAG_LINES);
940       gcov_write_unsigned (bb->index);
941       name_differs = line_differs = true;
942     }
943 
944   /* If this is a new source file, record it: a zero "line number"
945      followed by the file's name marks the switch.  */
946   if (name_differs)
947     {
948       prev_file_name = file_name;
949       gcov_write_unsigned (0);
950       gcov_write_filename (prev_file_name);
951     }
952   if (line_differs)
953     {
954       gcov_write_unsigned (line);
955       prev_line = line;
956     }
957 }
958 
959 /* Helper for qsort so edges get sorted from highest frequency to lowest.
960    This controls the edge weights for the minimal spanning tree algorithm.  */
961 static int
962 compare_freqs (const void *p1, const void *p2)
963 {
964   const_edge e1 = *(const const_edge *)p1;
965   const_edge e2 = *(const const_edge *)p2;
966 
967   /* Critical edges need to be split, which introduces extra control flow.
968      Make them heavier.  */
969   int m1 = EDGE_CRITICAL_P (e1) ? 2 : 1;
970   int m2 = EDGE_CRITICAL_P (e2) ? 2 : 1;
971 
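  /* The sort key is m * (EDGE_FREQUENCY + 1), in decreasing order, so hot
     and critical edges come first and are preferred by find_spanning_tree;
     edges on the tree escape instrumentation.  The +1 keeps critical edges
     heavier than normal ones even at frequency zero.  */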
972   if (EDGE_FREQUENCY (e1) * m1 + m1 != EDGE_FREQUENCY (e2) * m2 + m2)
973     return EDGE_FREQUENCY (e2) * m2 + m2 - EDGE_FREQUENCY (e1) * m1 - m1;
974   /* Stabilize sort.  */
975   if (e1->src->index != e2->src->index)
976     return e2->src->index - e1->src->index;
977   return e2->dest->index - e1->dest->index;
978 }
979 
980 /* Only read execution count for thunks.  */
981 
982 void
983 read_thunk_profile (struct cgraph_node *node)
984 {
985   tree old = current_function_decl;
986   current_function_decl = node->decl;
987   gcov_type *counts = get_coverage_counts (GCOV_COUNTER_ARCS, 1, 0, 0, NULL);
988   if (counts)
989     {
990       node->callees->count = node->count
991 	 = profile_count::from_gcov_type (counts[0]);
992       free (counts);
993     }
994   current_function_decl = old;
995   return;
996 }
997 
998 
999 /* Instrument and/or analyze program behavior based on the program's CFG.
1000 
1001    This function creates a representation of the control flow graph (of
1002    the function being compiled) that is suitable for the instrumentation
1003    of edges and/or converting measured edge counts to counts on the
1004    complete CFG.
1005 
1006    When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
1007    the flow graph that are needed to reconstruct the dynamic behavior of the
1008    flow graph.  This data is written to the gcno file for gcov.
1009 
1010    When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
1011    information from the gcda file containing edge count information from
1012    previous executions of the function being compiled.  In this case, the
1013    control flow graph is annotated with actual execution counts by
1014    compute_branch_probabilities().
1015 
1016    Main entry point of this file.  */
1017 
1018 void
1019 branch_prob (bool thunk)
1020 {
1021   basic_block bb;
1022   unsigned i;
1023   unsigned num_edges, ignored_edges;
1024   unsigned num_instrumented;
1025   struct edge_list *el;
1026   histogram_values values = histogram_values ();
1027   unsigned cfg_checksum, lineno_checksum;
1028 
1029   total_num_times_called++;
1030 
1031   flow_call_edges_add (NULL);
1032   add_noreturn_fake_exit_edges ();
1033 
1034   if (!thunk)
1035     {
1036       /* We can't handle cyclic regions constructed using abnormal edges.
1037 	 To avoid these we replace every source of an abnormal edge by a fake
1038 	 edge from the entry node, and every destination by a fake edge to the
1039 	 exit.  This keeps the graph acyclic and our calculation exact for all
1040 	 normal edges except for the exit and entrance ones.
1041 
1042 	 We also add a fake exit edge for each call and asm statement in a
1043 	 basic block, since it may not return.  */
1044 
1045       FOR_EACH_BB_FN (bb, cfun)
1046 	{
1047 	  int need_exit_edge = 0, need_entry_edge = 0;
1048 	  int have_exit_edge = 0, have_entry_edge = 0;
1049 	  edge e;
1050 	  edge_iterator ei;
1051 
1052 	  /* Functions returning multiple times are not handled by extra edges.
1053 	     Instead we simply allow negative counts on the edges from the exit
1054 	     to the block past the call, and the corresponding probabilities.
1055 	     We can't use extra edges because that would result in a flowgraph
1056 	     that needs to have fake edges outside the spanning tree.  */
1057 
1058 	  FOR_EACH_EDGE (e, ei, bb->succs)
1059 	    {
1060 	      gimple_stmt_iterator gsi;
1061 	      gimple *last = NULL;
1062 
1063 	      /* It may happen that there are compiler generated statements
1064 		 without a locus at all.  Go through the basic block from the
1065 		 last to the first statement looking for a locus.  */
1066 	      for (gsi = gsi_last_nondebug_bb (bb);
1067 		   !gsi_end_p (gsi);
1068 		   gsi_prev_nondebug (&gsi))
1069 		{
1070 		  last = gsi_stmt (gsi);
1071 		  if (!RESERVED_LOCATION_P (gimple_location (last)))
1072 		    break;
1073 		}
1074 
1075 	      /* An edge with a goto locus might get wrong coverage info unless
1076 		 it is the only edge out of BB.
1077 		 Don't split the edge when the locations match, so that
1078 		 if (blah) goto something;
1079 		 is not counted twice.  */
1080 	      if (last
1081 		  && gimple_has_location (last)
1082 		  && !RESERVED_LOCATION_P (e->goto_locus)
1083 		  && !single_succ_p (bb)
1084 		  && (LOCATION_FILE (e->goto_locus)
1085 		      != LOCATION_FILE (gimple_location (last))
1086 		      || (LOCATION_LINE (e->goto_locus)
1087 			  != LOCATION_LINE (gimple_location (last)))))
1088 		{
1089 		  basic_block new_bb = split_edge (e);
1090 		  edge ne = single_succ_edge (new_bb);
1091 		  ne->goto_locus = e->goto_locus;
1092 		}
1093 	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1094 		   && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1095 		need_exit_edge = 1;
1096 	      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1097 		have_exit_edge = 1;
1098 	    }
1099 	  FOR_EACH_EDGE (e, ei, bb->preds)
1100 	    {
1101 	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1102 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1103 		need_entry_edge = 1;
1104 	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1105 		have_entry_edge = 1;
1106 	    }
1107 
1108 	  if (need_exit_edge && !have_exit_edge)
1109 	    {
1110 	      if (dump_file)
1111 		fprintf (dump_file, "Adding fake exit edge to bb %i\n",
1112 			 bb->index);
1113 	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
1114 	    }
1115 	  if (need_entry_edge && !have_entry_edge)
1116 	    {
1117 	      if (dump_file)
1118 		fprintf (dump_file, "Adding fake entry edge to bb %i\n",
1119 			 bb->index);
1120 	      make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
1121 	      /* Avoid bbs that have both fake entry edge and also some
1122 		 exit edge.  One of those edges wouldn't be added to the
1123 		 spanning tree, but we can't instrument any of them.  */
1124 	      if (have_exit_edge || need_exit_edge)
1125 		{
1126 		  gimple_stmt_iterator gsi;
1127 		  gimple *first;
1128 
1129 		  gsi = gsi_start_nondebug_after_labels_bb (bb);
1130 		  gcc_checking_assert (!gsi_end_p (gsi));
1131 		  first = gsi_stmt (gsi);
1132 		  /* Don't split the bbs containing __builtin_setjmp_receiver
1133 		     or ABNORMAL_DISPATCHER calls.  These are very
1134 		     special and don't expect anything to be inserted before
1135 		     them.  */
1136 		  if (is_gimple_call (first)
1137 		      && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
1138 			  || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
1139 			  || (gimple_call_internal_p (first)
1140 			      && (gimple_call_internal_fn (first)
1141 				  == IFN_ABNORMAL_DISPATCHER))))
1142 		    continue;
1143 
1144 		  if (dump_file)
1145 		    fprintf (dump_file, "Splitting bb %i after labels\n",
1146 			     bb->index);
1147 		  split_block_after_labels (bb);
1148 		}
1149 	    }
1150 	}
1151     }
1152 
1153   el = create_edge_list ();
1154   num_edges = NUM_EDGES (el);
1155   qsort (el->index_to_edge, num_edges, sizeof (edge), compare_freqs);
1156   alloc_aux_for_edges (sizeof (struct edge_profile_info));
1157 
1158   /* The basic blocks are expected to be numbered sequentially.  */
1159   compact_blocks ();
1160 
1161   ignored_edges = 0;
1162   for (i = 0 ; i < num_edges ; i++)
1163     {
1164       edge e = INDEX_EDGE (el, i);
1165 
1166       /* Mark edges we've replaced by fake edges above as ignored.  */
1167       if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
1168 	  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1169 	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1170 	{
1171 	  EDGE_INFO (e)->ignore = 1;
1172 	  ignored_edges++;
1173 	}
1174     }
1175 
1176   /* Create a spanning tree from the basic block graph, marking each edge
1177      that is on the spanning tree.  We insert as many abnormal and critical
1178      edges as possible to minimize the number of edge splits necessary.  */
1179 
1180   if (!thunk)
1181     find_spanning_tree (el);
1182   else
1183     {
1184       edge e;
1185       edge_iterator ei;
1186       /* Keep only edge from entry block to be instrumented.  */
1187       FOR_EACH_BB_FN (bb, cfun)
1188 	FOR_EACH_EDGE (e, ei, bb->succs)
1189 	  EDGE_INFO (e)->ignore = true;
1190     }
1191 
1192 
1193   /* Fake edges that are not on the tree will not be instrumented, so
1194      mark them ignored.  */
1195   for (num_instrumented = i = 0; i < num_edges; i++)
1196     {
1197       edge e = INDEX_EDGE (el, i);
1198       struct edge_profile_info *inf = EDGE_INFO (e);
1199 
1200       if (inf->ignore || inf->on_tree)
1201 	/*NOP*/;
1202       else if (e->flags & EDGE_FAKE)
1203 	{
1204 	  inf->ignore = 1;
1205 	  ignored_edges++;
1206 	}
1207       else
1208 	num_instrumented++;
1209     }
1210 
1211   total_num_blocks += n_basic_blocks_for_fn (cfun);
1212   if (dump_file)
1213     fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));
1214 
1215   total_num_edges += num_edges;
1216   if (dump_file)
1217     fprintf (dump_file, "%d edges\n", num_edges);
1218 
1219   total_num_edges_ignored += ignored_edges;
1220   if (dump_file)
1221     fprintf (dump_file, "%d ignored edges\n", ignored_edges);
1222 
1223   total_num_edges_instrumented += num_instrumented;
1224   if (dump_file)
1225     fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);
1226 
1227   /* Compute two different checksums.  Note that we want to compute
1228      the checksum in only one place, since it depends on the shape
1229      of the control flow, which can change during
1230      various transformations.  */
1231   if (thunk)
1232     {
1233       /* At stream-in time we do not have a CFG, so we cannot compute checksums.  */
1234       cfg_checksum = 0;
1235       lineno_checksum = 0;
1236     }
1237   else
1238     {
1239       cfg_checksum = coverage_compute_cfg_checksum (cfun);
1240       lineno_checksum = coverage_compute_lineno_checksum ();
1241     }
1242 
1243   /* Write the data from which gcov can reconstruct the basic block
1244      graph and function line numbers (the gcno file).  */
1245   if (coverage_begin_function (lineno_checksum, cfg_checksum))
1246     {
1247       gcov_position_t offset;
1248 
1249       /* Basic block flags */
1250       offset = gcov_write_tag (GCOV_TAG_BLOCKS);
1251       gcov_write_unsigned (n_basic_blocks_for_fn (cfun));
1252       gcov_write_length (offset);
1253 
1254       /* Arcs */
1255       FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
1256 		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1257 	{
1258 	  edge e;
1259 	  edge_iterator ei;
1260 
1261 	  offset = gcov_write_tag (GCOV_TAG_ARCS);
1262 	  gcov_write_unsigned (bb->index);
1263 
1264 	  FOR_EACH_EDGE (e, ei, bb->succs)
1265 	    {
1266 	      struct edge_profile_info *i = EDGE_INFO (e);
1267 	      if (!i->ignore)
1268 		{
1269 		  unsigned flag_bits = 0;
1270 
1271 		  if (i->on_tree)
1272 		    flag_bits |= GCOV_ARC_ON_TREE;
1273 		  if (e->flags & EDGE_FAKE)
1274 		    flag_bits |= GCOV_ARC_FAKE;
1275 		  if (e->flags & EDGE_FALLTHRU)
1276 		    flag_bits |= GCOV_ARC_FALLTHROUGH;
1277 		  /* On trees we don't have fallthru flags, but we can
1278 		     recompute them from CFG shape.  */
1279 		  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
1280 		      && e->src->next_bb == e->dest)
1281 		    flag_bits |= GCOV_ARC_FALLTHROUGH;
1282 
1283 		  gcov_write_unsigned (e->dest->index);
1284 		  gcov_write_unsigned (flag_bits);
1285 	        }
1286 	    }
1287 
1288 	  gcov_write_length (offset);
1289 	}
1290 
1291       /* Line numbers.  */
1292       /* Initialize the output.  */
1293       output_location (NULL, 0, NULL, NULL);
1294 
1295       FOR_EACH_BB_FN (bb, cfun)
1296 	{
1297 	  gimple_stmt_iterator gsi;
1298 	  gcov_position_t offset = 0;
1299 
1300 	  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
1301 	    {
1302 	      expanded_location curr_location =
1303 		expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1304 	      output_location (curr_location.file, curr_location.line,
1305 			       &offset, bb);
1306 	    }
1307 
1308 	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1309 	    {
1310 	      gimple *stmt = gsi_stmt (gsi);
1311 	      if (!RESERVED_LOCATION_P (gimple_location (stmt)))
1312 		output_location (gimple_filename (stmt), gimple_lineno (stmt),
1313 				 &offset, bb);
1314 	    }
1315 
1316 	  /* Notice GOTO expressions eliminated while constructing the CFG.  */
1317 	  if (single_succ_p (bb)
1318 	      && !RESERVED_LOCATION_P (single_succ_edge (bb)->goto_locus))
1319 	    {
1320 	      expanded_location curr_location
1321 		= expand_location (single_succ_edge (bb)->goto_locus);
1322 	      output_location (curr_location.file, curr_location.line,
1323 			       &offset, bb);
1324 	    }
1325 
1326 	  if (offset)
1327 	    {
1328 	      /* A zero "line number" and a NULL file name terminate the record.  */
1329 	      gcov_write_unsigned (0);
1330 	      gcov_write_string (NULL);
1331 	      gcov_write_length (offset);
1332 	    }
1333 	}
1334     }
1335 
1336   if (flag_profile_values)
1337     gimple_find_values_to_profile (&values);
1338 
1339   if (flag_branch_probabilities)
1340     {
1341       compute_branch_probabilities (cfg_checksum, lineno_checksum);
1342       if (flag_profile_values)
1343 	compute_value_histograms (values, cfg_checksum, lineno_checksum);
1344     }
1345 
1346   remove_fake_edges ();
1347 
1348   /* For each edge not on the spanning tree, add counting code.  */
1349   if (profile_arc_flag
1350       && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
1351     {
1352       unsigned n_instrumented;
1353 
1354       gimple_init_gcov_profiler ();
1355 
1356       n_instrumented = instrument_edges (el);
1357 
1358       gcc_assert (n_instrumented == num_instrumented);
1359 
1360       if (flag_profile_values)
1361 	instrument_values (values);
1362 
1363       /* Commit changes done by instrumentation.  */
1364       gsi_commit_edge_inserts ();
1365     }
1366 
1367   free_aux_for_edges ();
1368 
1369   values.release ();
1370   free_edge_list (el);
1371   coverage_end_function (lineno_checksum, cfg_checksum);
1372   if (flag_branch_probabilities && profile_info)
1373     {
1374       struct loop *loop;
1375       if (dump_file && (dump_flags & TDF_DETAILS))
1376 	report_predictor_hitrates ();
1377       profile_status_for_fn (cfun) = PROFILE_READ;
1378 
1379       /* At this moment we have precise loop iteration count estimates.
1380 	 Record them in the loop structure before the profile gets out of date.  */
1381       FOR_EACH_LOOP (loop, 0)
1382 	if (loop->header->count > 0)
1383 	  {
1384 	    gcov_type nit = expected_loop_iterations_unbounded (loop);
1385 	    widest_int bound = gcov_type_to_wide_int (nit);
1386 	    loop->any_estimate = false;
1387 	    record_niter_bound (loop, bound, true, false);
1388 	  }
1389       compute_function_frequency ();
1390     }
1391 }
1392 
1393 /* Union find algorithm implementation for the basic blocks using
1394    aux fields.  */
1395 
1396 static basic_block
1397 find_group (basic_block bb)
1398 {
1399   basic_block group = bb, bb1;
1400 
1401   while ((basic_block) group->aux != group)
1402     group = (basic_block) group->aux;
1403 
1404   /* Compress path.  */
1405   while ((basic_block) bb->aux != group)
1406     {
1407       bb1 = (basic_block) bb->aux;
1408       bb->aux = (void *) group;
1409       bb = bb1;
1410     }
1411   return group;
1412 }
1413 
1414 static void
1415 union_groups (basic_block bb1, basic_block bb2)
1416 {
1417   basic_block bb1g = find_group (bb1);
1418   basic_block bb2g = find_group (bb2);
1419 
1420   /* ??? I don't have a place for the rank field.  OK, let's go without it;
1421      this code is unlikely to be a performance problem anyway.  */
1422   gcc_assert (bb1g != bb2g);
1423 
1424   bb1g->aux = bb2g;
1425 }
1426 
1427 /* This function searches all of the edges in the program flow graph, and puts
1428    as many bad edges as possible onto the spanning tree.  Bad edges include
1429    abnormal edges, which can't be instrumented at the moment.  Since it is
1430    possible for fake edges to form a cycle, we will have to develop some
1431    better way in the future.  Also put critical edges on the tree, since they
1432    are more expensive to instrument.  */
1433 
1434 static void
1435 find_spanning_tree (struct edge_list *el)
1436 {
1437   int i;
1438   int num_edges = NUM_EDGES (el);
1439   basic_block bb;
1440 
1441   /* We use aux field for standard union-find algorithm.  */
1442   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
1443     bb->aux = bb;
1444 
1445   /* Add the fake exit-to-entry edge, which we can't instrument.  */
1446   union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
1447 
1448   /* First add all abnormal edges to the tree unless they form a cycle.  Also
1449      add all edges to the exit block, to avoid inserting profiling code after
1450      the return value of the function has been set.  */
1451   for (i = 0; i < num_edges; i++)
1452     {
1453       edge e = INDEX_EDGE (el, i);
1454       if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1455 	   || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1456 	  && !EDGE_INFO (e)->ignore
1457 	  && (find_group (e->src) != find_group (e->dest)))
1458 	{
1459 	  if (dump_file)
1460 	    fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1461 		     e->src->index, e->dest->index);
1462 	  EDGE_INFO (e)->on_tree = 1;
1463 	  union_groups (e->src, e->dest);
1464 	}
1465     }
1466 
1467   /* And now the rest.  The edge list is sorted by frequency, so we
1468      produce a minimal spanning tree.  */
1469   for (i = 0; i < num_edges; i++)
1470     {
1471       edge e = INDEX_EDGE (el, i);
1472       if (!EDGE_INFO (e)->ignore
1473 	  && find_group (e->src) != find_group (e->dest))
1474 	{
1475 	  if (dump_file)
1476 	    fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1477 		     e->src->index, e->dest->index);
1478 	  EDGE_INFO (e)->on_tree = 1;
1479 	  union_groups (e->src, e->dest);
1480 	}
1481     }
1482 
1483   clear_aux_for_blocks ();
1484 }
1485 
1486 /* Perform file-level initialization for branch-prob processing.  */
1487 
1488 void
1489 init_branch_prob (void)
1490 {
1491   int i;
1492 
1493   total_num_blocks = 0;
1494   total_num_edges = 0;
1495   total_num_edges_ignored = 0;
1496   total_num_edges_instrumented = 0;
1497   total_num_blocks_created = 0;
1498   total_num_passes = 0;
1499   total_num_times_called = 0;
1500   total_num_branches = 0;
1501   for (i = 0; i < 20; i++)
1502     total_hist_br_prob[i] = 0;
1503 }
1504 
1505 /* Performs file-level cleanup after branch-prob processing
1506    is completed.  */
1507 
1508 void
1509 end_branch_prob (void)
1510 {
1511   if (dump_file)
1512     {
1513       fprintf (dump_file, "\n");
1514       fprintf (dump_file, "Total number of blocks: %d\n",
1515 	       total_num_blocks);
1516       fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1517       fprintf (dump_file, "Total number of ignored edges: %d\n",
1518 	       total_num_edges_ignored);
1519       fprintf (dump_file, "Total number of instrumented edges: %d\n",
1520 	       total_num_edges_instrumented);
1521       fprintf (dump_file, "Total number of blocks created: %d\n",
1522 	       total_num_blocks_created);
1523       fprintf (dump_file, "Total number of graph solution passes: %d\n",
1524 	       total_num_passes);
1525       if (total_num_times_called != 0)
1526 	fprintf (dump_file, "Average number of graph solution passes: %d\n",
1527 		 (total_num_passes + (total_num_times_called  >> 1))
1528 		 / total_num_times_called);
1529       fprintf (dump_file, "Total number of branches: %d\n",
1530 	       total_num_branches);
1531       if (total_num_branches)
1532 	{
1533 	  int i;
1534 
1535 	  for (i = 0; i < 10; i++)
1536 	    fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1537 		     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1538 		     / total_num_branches, 5*i, 5*i+5);
1539 	}
1540     }
1541 }
1542