138fd1498Szrj /* Calculate branch probabilities, and basic block execution counts.
238fd1498Szrj Copyright (C) 1990-2018 Free Software Foundation, Inc.
338fd1498Szrj Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
438fd1498Szrj based on some ideas from Dain Samples of UC Berkeley.
538fd1498Szrj Further mangling by Bob Manson, Cygnus Support.
638fd1498Szrj
738fd1498Szrj This file is part of GCC.
838fd1498Szrj
938fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
1038fd1498Szrj the terms of the GNU General Public License as published by the Free
1138fd1498Szrj Software Foundation; either version 3, or (at your option) any later
1238fd1498Szrj version.
1338fd1498Szrj
1438fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1538fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
1638fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
1738fd1498Szrj for more details.
1838fd1498Szrj
1938fd1498Szrj You should have received a copy of the GNU General Public License
2038fd1498Szrj along with GCC; see the file COPYING3. If not see
2138fd1498Szrj <http://www.gnu.org/licenses/>. */
2238fd1498Szrj
2338fd1498Szrj /* Generate basic block profile instrumentation and auxiliary files.
2438fd1498Szrj Profile generation is optimized, so that not all arcs in the basic
2538fd1498Szrj block graph need instrumenting. First, the BB graph is closed with
2638fd1498Szrj one entry (function start), and one exit (function exit). Any
2738fd1498Szrj ABNORMAL_EDGE cannot be instrumented (because there is no control
2838fd1498Szrj path to place the code). We close the graph by inserting fake
2938fd1498Szrj EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
3038fd1498Szrj edges that do not go to the exit_block. We ignore such abnormal
3138fd1498Szrj edges. Naturally these fake edges are never directly traversed,
3238fd1498Szrj and so *cannot* be directly instrumented. Some other graph
3338fd1498Szrj massaging is done. To optimize the instrumentation we generate the
3438fd1498Szrj BB minimal span tree, only edges that are not on the span tree
3538fd1498Szrj (plus the entry point) need instrumenting. From that information
3638fd1498Szrj all other edge counts can be deduced. By construction all fake
3738fd1498Szrj edges must be on the spanning tree. We also attempt to place
3838fd1498Szrj EDGE_CRITICAL edges on the spanning tree.
3938fd1498Szrj
4038fd1498Szrj The auxiliary files generated are <dumpbase>.gcno (at compile time)
4138fd1498Szrj and <dumpbase>.gcda (at run time). The format is
4238fd1498Szrj described in full in gcov-io.h. */
4338fd1498Szrj
4438fd1498Szrj /* ??? Register allocation should use basic block execution counts to
4538fd1498Szrj give preference to the most commonly executed blocks. */
4638fd1498Szrj
4738fd1498Szrj /* ??? Should calculate branch probabilities before instrumenting code, since
4838fd1498Szrj then we can use arc counts to help decide which arcs to instrument. */
4938fd1498Szrj
5038fd1498Szrj #include "config.h"
5138fd1498Szrj #include "system.h"
5238fd1498Szrj #include "coretypes.h"
5338fd1498Szrj #include "backend.h"
5438fd1498Szrj #include "rtl.h"
5538fd1498Szrj #include "tree.h"
5638fd1498Szrj #include "gimple.h"
5738fd1498Szrj #include "cfghooks.h"
5838fd1498Szrj #include "cgraph.h"
5938fd1498Szrj #include "coverage.h"
6038fd1498Szrj #include "diagnostic-core.h"
6138fd1498Szrj #include "cfganal.h"
6238fd1498Szrj #include "value-prof.h"
6338fd1498Szrj #include "gimple-iterator.h"
6438fd1498Szrj #include "tree-cfg.h"
6538fd1498Szrj #include "dumpfile.h"
6638fd1498Szrj #include "cfgloop.h"
6738fd1498Szrj
6838fd1498Szrj #include "profile.h"
6938fd1498Szrj
/* Map from BBs/edges to gcov counters.  */
vec<gcov_type> bb_gcov_counts;
hash_map<edge,gcov_type> *edge_gcov_counts;

/* Per-basic-block bookkeeping attached to each block's AUX field while
   solving the flow graph for edge counts.  */
struct bb_profile_info {
  /* Set once the execution count of this block is known.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

/* Accessor for the bb_profile_info stashed in a block's AUX pointer.  */
#define BB_INFO(b) ((struct bb_profile_info *) (b)->aux)
8338fd1498Szrj
8438fd1498Szrj
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Counter working set information computed from the current counter
   summary.  Not initialized unless profile_info summary is non-NULL.  */
static gcov_working_set_t gcov_working_sets[NUM_GCOV_WORKING_SETS];

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
/* Histogram of branch probabilities, in 20 five-percent buckets.  */
static int total_hist_br_prob[20];
static int total_num_branches;
10538fd1498Szrj
10638fd1498Szrj /* Helper function to update gcov_working_sets. */
10738fd1498Szrj
add_working_set(gcov_working_set_t * set)10838fd1498Szrj void add_working_set (gcov_working_set_t *set) {
10938fd1498Szrj int i = 0;
11038fd1498Szrj for (; i < NUM_GCOV_WORKING_SETS; i++)
11138fd1498Szrj gcov_working_sets[i] = set[i];
11238fd1498Szrj }
11338fd1498Szrj
11438fd1498Szrj /* Forward declarations. */
11538fd1498Szrj static void find_spanning_tree (struct edge_list *);
11638fd1498Szrj
/* Add edge instrumentation code for the current function, described by
   edge list EL.  Only edges that are neither ignored nor on the
   spanning tree receive a counter; counts for tree edges are later
   reconstructed by the graph solver.  Returns the number of edges
   instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_profile_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              /* Abnormal edges cannot carry instrumentation; they must
                 all have been placed on the spanning tree.  */
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              gimple_gen_edge_profiler (num_instr_edges++, e);
            }
        }
    }

  /* NOTE(review): this adds the total edge count, not a count of newly
     created blocks, to the statistic -- looks intentional upstream but
     worth confirming.  */
  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
15538fd1498Szrj
/* Add code to measure histograms for values in list VALUES.  Each
   histogram allocates its own run of coverage counters; entries whose
   counters cannot be allocated are silently skipped.  */
static void
instrument_values (histogram_values values)
{
  unsigned i;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < values.length (); i++)
    {
      histogram_value hist = values[i];
      /* Counter kind backing this histogram type.  */
      unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);

      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      /* Dispatch to the gimple profiler generator for this kind.  */
      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          gimple_gen_interval_profiler (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          gimple_gen_pow2_profiler (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          gimple_gen_one_value_profiler (hist, t, 0);
          break;

        case HIST_TYPE_INDIR_CALL:
        case HIST_TYPE_INDIR_CALL_TOPN:
          gimple_gen_ic_profiler (hist, t, 0);
          break;

        case HIST_TYPE_AVERAGE:
          gimple_gen_average_profiler (hist, t, 0);
          break;

        case HIST_TYPE_IOR:
          gimple_gen_ior_profiler (hist, t, 0);
          break;

        case HIST_TYPE_TIME_PROFILE:
          gimple_gen_time_profiler (t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
}
20838fd1498Szrj
20938fd1498Szrj
/* Fill the working set information into the profile_info structure.
   No-op when no profile summary has been read.  When dumping, also
   prints one line per working-set bucket.  */

void
get_working_sets (void)
{
  unsigned ws_ix, pctinc, pct;
  gcov_working_set_t *ws_info;

  if (!profile_info)
    return;

  compute_working_sets (profile_info, gcov_working_sets);

  if (dump_file)
    {
      fprintf (dump_file, "Counter working sets:\n");
      /* Multiply the percentage by 100 to avoid float.  */
      pctinc = 100 * 100 / NUM_GCOV_WORKING_SETS;
      for (ws_ix = 0, pct = pctinc; ws_ix < NUM_GCOV_WORKING_SETS;
           ws_ix++, pct += pctinc)
        {
          /* The last bucket is reported as 99.90% rather than 100%.  */
          if (ws_ix == NUM_GCOV_WORKING_SETS - 1)
            pct = 9990;
          ws_info = &gcov_working_sets[ws_ix];
          /* Print out the percentage using int arithmetic to avoid float.  */
          fprintf (dump_file, "\t\t%u.%02u%%: num counts=%u, min counter="
                   "%" PRId64 "\n",
                   pct / 100, pct - (pct / 100 * 100),
                   ws_info->num_counters,
                   (int64_t)ws_info->min_counter);
        }
    }
}
24338fd1498Szrj
24438fd1498Szrj /* Given a the desired percentage of the full profile (sum_all from the
24538fd1498Szrj summary), multiplied by 10 to avoid float in PCT_TIMES_10, returns
24638fd1498Szrj the corresponding working set information. If an exact match for
24738fd1498Szrj the percentage isn't found, the closest value is used. */
24838fd1498Szrj
24938fd1498Szrj gcov_working_set_t *
find_working_set(unsigned pct_times_10)25038fd1498Szrj find_working_set (unsigned pct_times_10)
25138fd1498Szrj {
25238fd1498Szrj unsigned i;
25338fd1498Szrj if (!profile_info)
25438fd1498Szrj return NULL;
25538fd1498Szrj gcc_assert (pct_times_10 <= 1000);
25638fd1498Szrj if (pct_times_10 >= 999)
25738fd1498Szrj return &gcov_working_sets[NUM_GCOV_WORKING_SETS - 1];
25838fd1498Szrj i = pct_times_10 * NUM_GCOV_WORKING_SETS / 1000;
25938fd1498Szrj if (!i)
26038fd1498Szrj return &gcov_working_sets[0];
26138fd1498Szrj return &gcov_working_sets[i - 1];
26238fd1498Szrj }
26338fd1498Szrj
/* Computes hybrid profile for all matching entries in da_file.
   Reads the raw arc counters for the current function; as a side
   effect the merged summary is cached in profile_info and the
   working-set information is recomputed.  Returns NULL when no usable
   counts were found.

   CFG_CHECKSUM is the precomputed checksum for the CFG.
   LINENO_CHECKSUM guards against stale profiles after source edits.  */

static gcov_type *
get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum,
                                lineno_checksum, &profile_info);
  if (!counts)
    return NULL;

  get_working_sets ();

  if (dump_file && profile_info)
    fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
             profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
29938fd1498Szrj
30038fd1498Szrj
/* Return true if any non-ignored edge in EDGES has a negative count.
   Negative counts on fake edges out of call-ending blocks are
   tolerated (presumably because such calls can return abnormally --
   confirm against the solver's handling).  Reports offenders to the
   dump file when dumping is enabled.  */
static bool
is_edge_inconsistent (vec<edge, va_gc> *edges)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, edges)
    {
      if (!EDGE_INFO (e)->ignore)
        {
          if (edge_gcov_count (e) < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
              if (dump_file)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count%" PRId64,
                           e->src->index, e->dest->index, edge_gcov_count (e));
                  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
                  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
                }
              return true;
            }
        }
    }
  return false;
}
32838fd1498Szrj
32938fd1498Szrj static void
correct_negative_edge_counts(void)33038fd1498Szrj correct_negative_edge_counts (void)
33138fd1498Szrj {
33238fd1498Szrj basic_block bb;
33338fd1498Szrj edge e;
33438fd1498Szrj edge_iterator ei;
33538fd1498Szrj
33638fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
33738fd1498Szrj {
33838fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
33938fd1498Szrj {
34038fd1498Szrj if (edge_gcov_count (e) < 0)
34138fd1498Szrj edge_gcov_count (e) = 0;
34238fd1498Szrj }
34338fd1498Szrj }
34438fd1498Szrj }
34538fd1498Szrj
/* Check consistency.
   Return true if inconsistency is found.  A profile is consistent when
   every block count is non-negative and equals the sum of its incoming
   and (with a call/exit-edge exemption) outgoing edge counts.  With a
   dump file active every problem is reported; otherwise we return at
   the first one found.  */
static bool
is_inconsistent (void)
{
  basic_block bb;
  bool inconsistent = false;
  FOR_EACH_BB_FN (bb, cfun)
    {
      inconsistent |= is_edge_inconsistent (bb->preds);
      if (!dump_file && inconsistent)
        return true;
      inconsistent |= is_edge_inconsistent (bb->succs);
      if (!dump_file && inconsistent)
        return true;
      if (bb_gcov_count (bb) < 0)
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       "%" PRId64,
                       bb->index,
                       bb_gcov_count (bb));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      if (bb_gcov_count (bb) != sum_edge_counts (bb->preds))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       "%" PRId64" should be %" PRId64,
                       bb->index,
                       bb_gcov_count (bb),
                       sum_edge_counts (bb->preds));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      /* An outgoing-sum mismatch is tolerated for blocks that end with
         a call and also have an edge to the exit block.  */
      if (bb_gcov_count (bb) != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
             && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       "%" PRId64" should be %" PRId64,
                       bb->index,
                       bb_gcov_count (bb),
                       sum_edge_counts (bb->succs));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
      if (!dump_file && inconsistent)
        return true;
    }

  return inconsistent;
}
40738fd1498Szrj
40838fd1498Szrj /* Set each basic block count to the sum of its outgoing edge counts */
40938fd1498Szrj static void
set_bb_counts(void)41038fd1498Szrj set_bb_counts (void)
41138fd1498Szrj {
41238fd1498Szrj basic_block bb;
41338fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
41438fd1498Szrj {
41538fd1498Szrj bb_gcov_count (bb) = sum_edge_counts (bb->succs);
41638fd1498Szrj gcc_assert (bb_gcov_count (bb) >= 0);
41738fd1498Szrj }
41838fd1498Szrj }
41938fd1498Szrj
/* Reads profile data and returns total number of edge counts read.
   EXEC_COUNTS holds the counters from the .da file in instrumented-edge
   order, or is NULL when no profile was found (counts are then zeroed).
   As a side effect, marks each instrumented edge's count valid and
   decrements the pending succ/pred counters used by the graph solver.  */
static int
read_profile_edge_counts (gcov_type *exec_counts)
{
  basic_block bb;
  int num_edges = 0;
  int exec_counts_pos = 0;
  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */
  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                edge_gcov_count (e) = exec_counts[exec_counts_pos++];
                /* No edge can run more often than sum_max; a larger value
                   indicates corrupted profile data.  With profile
                   correction we just note it once, otherwise error.  */
                if (edge_gcov_count (e) > profile_info->sum_max)
                  {
                    if (flag_profile_correction)
                      {
                        static bool informed = 0;
                        if (dump_enabled_p () && !informed)
                          dump_printf_loc (MSG_NOTE, input_location,
                                           "corrupted profile info: edge count"
                                           " exceeds maximal count\n");
                        informed = 1;
                      }
                    else
                      error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                             bb->index, e->dest->index);
                  }
              }
            else
              edge_gcov_count (e) = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, "%" PRId64,
                         (int64_t) edge_gcov_count (e));
              }
          }
    }

  return num_edges;
}
47838fd1498Szrj
47938fd1498Szrj
48038fd1498Szrj /* Compute the branch probabilities for the various branches.
48138fd1498Szrj Annotate them accordingly.
48238fd1498Szrj
48338fd1498Szrj CFG_CHECKSUM is the precomputed checksum for the CFG. */
48438fd1498Szrj
48538fd1498Szrj static void
compute_branch_probabilities(unsigned cfg_checksum,unsigned lineno_checksum)48638fd1498Szrj compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
48738fd1498Szrj {
48838fd1498Szrj basic_block bb;
48938fd1498Szrj int i;
49038fd1498Szrj int num_edges = 0;
49138fd1498Szrj int changes;
49238fd1498Szrj int passes;
49338fd1498Szrj int hist_br_prob[20];
49438fd1498Szrj int num_branches;
49538fd1498Szrj gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum);
49638fd1498Szrj int inconsistent = 0;
49738fd1498Szrj
49838fd1498Szrj /* Very simple sanity checks so we catch bugs in our profiling code. */
49938fd1498Szrj if (!profile_info)
50038fd1498Szrj {
50138fd1498Szrj if (dump_file)
50238fd1498Szrj fprintf (dump_file, "Profile info is missing; giving up\n");
50338fd1498Szrj return;
50438fd1498Szrj }
50538fd1498Szrj
50638fd1498Szrj bb_gcov_counts.safe_grow_cleared (last_basic_block_for_fn (cfun));
50738fd1498Szrj edge_gcov_counts = new hash_map<edge,gcov_type>;
50838fd1498Szrj
50938fd1498Szrj if (profile_info->sum_all < profile_info->sum_max)
51038fd1498Szrj {
51138fd1498Szrj error ("corrupted profile info: sum_all is smaller than sum_max");
51238fd1498Szrj exec_counts = NULL;
51338fd1498Szrj }
51438fd1498Szrj
51538fd1498Szrj /* Attach extra info block to each bb. */
51638fd1498Szrj alloc_aux_for_blocks (sizeof (struct bb_profile_info));
51738fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
51838fd1498Szrj {
51938fd1498Szrj edge e;
52038fd1498Szrj edge_iterator ei;
52138fd1498Szrj
52238fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
52338fd1498Szrj if (!EDGE_INFO (e)->ignore)
52438fd1498Szrj BB_INFO (bb)->succ_count++;
52538fd1498Szrj FOR_EACH_EDGE (e, ei, bb->preds)
52638fd1498Szrj if (!EDGE_INFO (e)->ignore)
52738fd1498Szrj BB_INFO (bb)->pred_count++;
52838fd1498Szrj }
52938fd1498Szrj
53038fd1498Szrj /* Avoid predicting entry on exit nodes. */
53138fd1498Szrj BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
53238fd1498Szrj BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
53338fd1498Szrj
53438fd1498Szrj num_edges = read_profile_edge_counts (exec_counts);
53538fd1498Szrj
53638fd1498Szrj if (dump_file)
53738fd1498Szrj fprintf (dump_file, "\n%d edge counts read\n", num_edges);
53838fd1498Szrj
53938fd1498Szrj /* For every block in the file,
54038fd1498Szrj - if every exit/entrance edge has a known count, then set the block count
54138fd1498Szrj - if the block count is known, and every exit/entrance edge but one has
54238fd1498Szrj a known execution count, then set the count of the remaining edge
54338fd1498Szrj
54438fd1498Szrj As edge counts are set, decrement the succ/pred count, but don't delete
54538fd1498Szrj the edge, that way we can easily tell when all edges are known, or only
54638fd1498Szrj one edge is unknown. */
54738fd1498Szrj
54838fd1498Szrj /* The order that the basic blocks are iterated through is important.
54938fd1498Szrj Since the code that finds spanning trees starts with block 0, low numbered
55038fd1498Szrj edges are put on the spanning tree in preference to high numbered edges.
55138fd1498Szrj Hence, most instrumented edges are at the end. Graph solving works much
55238fd1498Szrj faster if we propagate numbers from the end to the start.
55338fd1498Szrj
55438fd1498Szrj This takes an average of slightly more than 3 passes. */
55538fd1498Szrj
55638fd1498Szrj changes = 1;
55738fd1498Szrj passes = 0;
55838fd1498Szrj while (changes)
55938fd1498Szrj {
56038fd1498Szrj passes++;
56138fd1498Szrj changes = 0;
56238fd1498Szrj FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
56338fd1498Szrj {
56438fd1498Szrj struct bb_profile_info *bi = BB_INFO (bb);
56538fd1498Szrj if (! bi->count_valid)
56638fd1498Szrj {
56738fd1498Szrj if (bi->succ_count == 0)
56838fd1498Szrj {
56938fd1498Szrj edge e;
57038fd1498Szrj edge_iterator ei;
57138fd1498Szrj gcov_type total = 0;
57238fd1498Szrj
57338fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
57438fd1498Szrj total += edge_gcov_count (e);
57538fd1498Szrj bb_gcov_count (bb) = total;
57638fd1498Szrj bi->count_valid = 1;
57738fd1498Szrj changes = 1;
57838fd1498Szrj }
57938fd1498Szrj else if (bi->pred_count == 0)
58038fd1498Szrj {
58138fd1498Szrj edge e;
58238fd1498Szrj edge_iterator ei;
58338fd1498Szrj gcov_type total = 0;
58438fd1498Szrj
58538fd1498Szrj FOR_EACH_EDGE (e, ei, bb->preds)
58638fd1498Szrj total += edge_gcov_count (e);
58738fd1498Szrj bb_gcov_count (bb) = total;
58838fd1498Szrj bi->count_valid = 1;
58938fd1498Szrj changes = 1;
59038fd1498Szrj }
59138fd1498Szrj }
59238fd1498Szrj if (bi->count_valid)
59338fd1498Szrj {
59438fd1498Szrj if (bi->succ_count == 1)
59538fd1498Szrj {
59638fd1498Szrj edge e;
59738fd1498Szrj edge_iterator ei;
59838fd1498Szrj gcov_type total = 0;
59938fd1498Szrj
60038fd1498Szrj /* One of the counts will be invalid, but it is zero,
60138fd1498Szrj so adding it in also doesn't hurt. */
60238fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
60338fd1498Szrj total += edge_gcov_count (e);
60438fd1498Szrj
60538fd1498Szrj /* Search for the invalid edge, and set its count. */
60638fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
60738fd1498Szrj if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
60838fd1498Szrj break;
60938fd1498Szrj
61038fd1498Szrj /* Calculate count for remaining edge by conservation. */
61138fd1498Szrj total = bb_gcov_count (bb) - total;
61238fd1498Szrj
61338fd1498Szrj gcc_assert (e);
61438fd1498Szrj EDGE_INFO (e)->count_valid = 1;
61538fd1498Szrj edge_gcov_count (e) = total;
61638fd1498Szrj bi->succ_count--;
61738fd1498Szrj
61838fd1498Szrj BB_INFO (e->dest)->pred_count--;
61938fd1498Szrj changes = 1;
62038fd1498Szrj }
62138fd1498Szrj if (bi->pred_count == 1)
62238fd1498Szrj {
62338fd1498Szrj edge e;
62438fd1498Szrj edge_iterator ei;
62538fd1498Szrj gcov_type total = 0;
62638fd1498Szrj
62738fd1498Szrj /* One of the counts will be invalid, but it is zero,
62838fd1498Szrj so adding it in also doesn't hurt. */
62938fd1498Szrj FOR_EACH_EDGE (e, ei, bb->preds)
63038fd1498Szrj total += edge_gcov_count (e);
63138fd1498Szrj
63238fd1498Szrj /* Search for the invalid edge, and set its count. */
63338fd1498Szrj FOR_EACH_EDGE (e, ei, bb->preds)
63438fd1498Szrj if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
63538fd1498Szrj break;
63638fd1498Szrj
63738fd1498Szrj /* Calculate count for remaining edge by conservation. */
63838fd1498Szrj total = bb_gcov_count (bb) - total + edge_gcov_count (e);
63938fd1498Szrj
64038fd1498Szrj gcc_assert (e);
64138fd1498Szrj EDGE_INFO (e)->count_valid = 1;
64238fd1498Szrj edge_gcov_count (e) = total;
64338fd1498Szrj bi->pred_count--;
64438fd1498Szrj
64538fd1498Szrj BB_INFO (e->src)->succ_count--;
64638fd1498Szrj changes = 1;
64738fd1498Szrj }
64838fd1498Szrj }
64938fd1498Szrj }
65038fd1498Szrj }
65138fd1498Szrj
65238fd1498Szrj total_num_passes += passes;
65338fd1498Szrj if (dump_file)
65438fd1498Szrj fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
65538fd1498Szrj
65638fd1498Szrj /* If the graph has been correctly solved, every block will have a
65738fd1498Szrj succ and pred count of zero. */
65838fd1498Szrj FOR_EACH_BB_FN (bb, cfun)
65938fd1498Szrj {
66038fd1498Szrj gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
66138fd1498Szrj }
66238fd1498Szrj
66338fd1498Szrj /* Check for inconsistent basic block counts */
66438fd1498Szrj inconsistent = is_inconsistent ();
66538fd1498Szrj
66638fd1498Szrj if (inconsistent)
66738fd1498Szrj {
66838fd1498Szrj if (flag_profile_correction)
66938fd1498Szrj {
67038fd1498Szrj /* Inconsistency detected. Make it flow-consistent. */
67138fd1498Szrj static int informed = 0;
67238fd1498Szrj if (dump_enabled_p () && informed == 0)
67338fd1498Szrj {
67438fd1498Szrj informed = 1;
67538fd1498Szrj dump_printf_loc (MSG_NOTE, input_location,
67638fd1498Szrj "correcting inconsistent profile data\n");
67738fd1498Szrj }
67838fd1498Szrj correct_negative_edge_counts ();
67938fd1498Szrj /* Set bb counts to the sum of the outgoing edge counts */
68038fd1498Szrj set_bb_counts ();
68138fd1498Szrj if (dump_file)
68238fd1498Szrj fprintf (dump_file, "\nCalling mcf_smooth_cfg\n");
68338fd1498Szrj mcf_smooth_cfg ();
68438fd1498Szrj }
68538fd1498Szrj else
68638fd1498Szrj error ("corrupted profile info: profile data is not flow-consistent");
68738fd1498Szrj }
68838fd1498Szrj
68938fd1498Szrj /* For every edge, calculate its branch probability and add a reg_note
69038fd1498Szrj to the branch insn to indicate this. */
69138fd1498Szrj
69238fd1498Szrj for (i = 0; i < 20; i++)
69338fd1498Szrj hist_br_prob[i] = 0;
69438fd1498Szrj num_branches = 0;
69538fd1498Szrj
69638fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
69738fd1498Szrj {
69838fd1498Szrj edge e;
69938fd1498Szrj edge_iterator ei;
70038fd1498Szrj
70138fd1498Szrj if (bb_gcov_count (bb) < 0)
70238fd1498Szrj {
70338fd1498Szrj error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
70438fd1498Szrj bb->index, (int)bb_gcov_count (bb));
70538fd1498Szrj bb_gcov_count (bb) = 0;
70638fd1498Szrj }
70738fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
70838fd1498Szrj {
70938fd1498Szrj /* Function may return twice in the cased the called function is
71038fd1498Szrj setjmp or calls fork, but we can't represent this by extra
71138fd1498Szrj edge from the entry, since extra edge from the exit is
71238fd1498Szrj already present. We get negative frequency from the entry
71338fd1498Szrj point. */
71438fd1498Szrj if ((edge_gcov_count (e) < 0
71538fd1498Szrj && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
71638fd1498Szrj || (edge_gcov_count (e) > bb_gcov_count (bb)
71738fd1498Szrj && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
71838fd1498Szrj {
71938fd1498Szrj if (block_ends_with_call_p (bb))
72038fd1498Szrj edge_gcov_count (e) = edge_gcov_count (e) < 0
72138fd1498Szrj ? 0 : bb_gcov_count (bb);
72238fd1498Szrj }
72338fd1498Szrj if (edge_gcov_count (e) < 0
72438fd1498Szrj || edge_gcov_count (e) > bb_gcov_count (bb))
72538fd1498Szrj {
72638fd1498Szrj error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
72738fd1498Szrj e->src->index, e->dest->index,
72838fd1498Szrj (int)edge_gcov_count (e));
72938fd1498Szrj edge_gcov_count (e) = bb_gcov_count (bb) / 2;
73038fd1498Szrj }
73138fd1498Szrj }
73238fd1498Szrj if (bb_gcov_count (bb))
73338fd1498Szrj {
73438fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
73538fd1498Szrj e->probability = profile_probability::probability_in_gcov_type
73638fd1498Szrj (edge_gcov_count (e), bb_gcov_count (bb));
73738fd1498Szrj if (bb->index >= NUM_FIXED_BLOCKS
73838fd1498Szrj && block_ends_with_condjump_p (bb)
73938fd1498Szrj && EDGE_COUNT (bb->succs) >= 2)
74038fd1498Szrj {
74138fd1498Szrj int prob;
74238fd1498Szrj edge e;
74338fd1498Szrj int index;
74438fd1498Szrj
74538fd1498Szrj /* Find the branch edge. It is possible that we do have fake
74638fd1498Szrj edges here. */
74738fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
74838fd1498Szrj if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
74938fd1498Szrj break;
75038fd1498Szrj
75138fd1498Szrj prob = e->probability.to_reg_br_prob_base ();
75238fd1498Szrj index = prob * 20 / REG_BR_PROB_BASE;
75338fd1498Szrj
75438fd1498Szrj if (index == 20)
75538fd1498Szrj index = 19;
75638fd1498Szrj hist_br_prob[index]++;
75738fd1498Szrj
75838fd1498Szrj num_branches++;
75938fd1498Szrj }
76038fd1498Szrj }
76138fd1498Szrj /* As a last resort, distribute the probabilities evenly.
76238fd1498Szrj Use simple heuristics that if there are normal edges,
76338fd1498Szrj give all abnormals frequency of 0, otherwise distribute the
76438fd1498Szrj frequency over abnormals (this is the case of noreturn
76538fd1498Szrj calls). */
76638fd1498Szrj else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
76738fd1498Szrj {
76838fd1498Szrj int total = 0;
76938fd1498Szrj
77038fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
77138fd1498Szrj if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
77238fd1498Szrj total ++;
77338fd1498Szrj if (total)
77438fd1498Szrj {
77538fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
77638fd1498Szrj if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
77738fd1498Szrj e->probability
77838fd1498Szrj = profile_probability::guessed_always ().apply_scale (1, total);
77938fd1498Szrj else
78038fd1498Szrj e->probability = profile_probability::never ();
78138fd1498Szrj }
78238fd1498Szrj else
78338fd1498Szrj {
78438fd1498Szrj total += EDGE_COUNT (bb->succs);
78538fd1498Szrj FOR_EACH_EDGE (e, ei, bb->succs)
78638fd1498Szrj e->probability
78738fd1498Szrj = profile_probability::guessed_always ().apply_scale (1, total);
78838fd1498Szrj }
78938fd1498Szrj if (bb->index >= NUM_FIXED_BLOCKS
79038fd1498Szrj && block_ends_with_condjump_p (bb)
79138fd1498Szrj && EDGE_COUNT (bb->succs) >= 2)
79238fd1498Szrj num_branches++;
79338fd1498Szrj }
79438fd1498Szrj }
79538fd1498Szrj
79638fd1498Szrj /* If we have real data, use them! */
79738fd1498Szrj if (bb_gcov_count (ENTRY_BLOCK_PTR_FOR_FN (cfun))
79838fd1498Szrj || !flag_guess_branch_prob)
79938fd1498Szrj FOR_ALL_BB_FN (bb, cfun)
80038fd1498Szrj bb->count = profile_count::from_gcov_type (bb_gcov_count (bb));
80138fd1498Szrj /* If function was not trained, preserve local estimates including statically
80238fd1498Szrj determined zero counts. */
80338fd1498Szrj else
80438fd1498Szrj FOR_ALL_BB_FN (bb, cfun)
80538fd1498Szrj if (!(bb->count == profile_count::zero ()))
80638fd1498Szrj bb->count = bb->count.global0 ();
80738fd1498Szrj
80838fd1498Szrj bb_gcov_counts.release ();
80938fd1498Szrj delete edge_gcov_counts;
81038fd1498Szrj edge_gcov_counts = NULL;
81138fd1498Szrj
81238fd1498Szrj update_max_bb_count ();
81338fd1498Szrj
81438fd1498Szrj if (dump_file)
81538fd1498Szrj {
81638fd1498Szrj fprintf (dump_file, "%d branches\n", num_branches);
81738fd1498Szrj if (num_branches)
81838fd1498Szrj for (i = 0; i < 10; i++)
81938fd1498Szrj fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
82038fd1498Szrj (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
82138fd1498Szrj 5 * i, 5 * i + 5);
82238fd1498Szrj
82338fd1498Szrj total_num_branches += num_branches;
82438fd1498Szrj for (i = 0; i < 20; i++)
82538fd1498Szrj total_hist_br_prob[i] += hist_br_prob[i];
82638fd1498Szrj
82738fd1498Szrj fputc ('\n', dump_file);
82838fd1498Szrj fputc ('\n', dump_file);
82938fd1498Szrj }
83038fd1498Szrj
83138fd1498Szrj free_aux_for_blocks ();
83238fd1498Szrj }
83338fd1498Szrj
83438fd1498Szrj /* Load value histograms values whose description is stored in VALUES array
83538fd1498Szrj from .gcda file.
83638fd1498Szrj
83738fd1498Szrj CFG_CHECKSUM is the precomputed checksum for the CFG. */
83838fd1498Szrj
83938fd1498Szrj static void
compute_value_histograms(histogram_values values,unsigned cfg_checksum,unsigned lineno_checksum)84038fd1498Szrj compute_value_histograms (histogram_values values, unsigned cfg_checksum,
84138fd1498Szrj unsigned lineno_checksum)
84238fd1498Szrj {
84338fd1498Szrj unsigned i, j, t, any;
84438fd1498Szrj unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
84538fd1498Szrj gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
84638fd1498Szrj gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
84738fd1498Szrj gcov_type *aact_count;
84838fd1498Szrj struct cgraph_node *node;
84938fd1498Szrj
85038fd1498Szrj for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
85138fd1498Szrj n_histogram_counters[t] = 0;
85238fd1498Szrj
85338fd1498Szrj for (i = 0; i < values.length (); i++)
85438fd1498Szrj {
85538fd1498Szrj histogram_value hist = values[i];
85638fd1498Szrj n_histogram_counters[(int) hist->type] += hist->n_counters;
85738fd1498Szrj }
85838fd1498Szrj
85938fd1498Szrj any = 0;
86038fd1498Szrj for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
86138fd1498Szrj {
86238fd1498Szrj if (!n_histogram_counters[t])
86338fd1498Szrj {
86438fd1498Szrj histogram_counts[t] = NULL;
86538fd1498Szrj continue;
86638fd1498Szrj }
86738fd1498Szrj
86838fd1498Szrj histogram_counts[t] =
86938fd1498Szrj get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
87038fd1498Szrj n_histogram_counters[t], cfg_checksum,
87138fd1498Szrj lineno_checksum, NULL);
87238fd1498Szrj if (histogram_counts[t])
87338fd1498Szrj any = 1;
87438fd1498Szrj act_count[t] = histogram_counts[t];
87538fd1498Szrj }
87638fd1498Szrj if (!any)
87738fd1498Szrj return;
87838fd1498Szrj
87938fd1498Szrj for (i = 0; i < values.length (); i++)
88038fd1498Szrj {
88138fd1498Szrj histogram_value hist = values[i];
88238fd1498Szrj gimple *stmt = hist->hvalue.stmt;
88338fd1498Szrj
88438fd1498Szrj t = (int) hist->type;
88538fd1498Szrj
88638fd1498Szrj aact_count = act_count[t];
88738fd1498Szrj
88838fd1498Szrj if (act_count[t])
88938fd1498Szrj act_count[t] += hist->n_counters;
89038fd1498Szrj
89138fd1498Szrj gimple_add_histogram_value (cfun, stmt, hist);
89238fd1498Szrj hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
89338fd1498Szrj for (j = 0; j < hist->n_counters; j++)
89438fd1498Szrj if (aact_count)
89538fd1498Szrj hist->hvalue.counters[j] = aact_count[j];
89638fd1498Szrj else
89738fd1498Szrj hist->hvalue.counters[j] = 0;
89838fd1498Szrj
89938fd1498Szrj /* Time profiler counter is not related to any statement,
90038fd1498Szrj so that we have to read the counter and set the value to
90138fd1498Szrj the corresponding call graph node. */
90238fd1498Szrj if (hist->type == HIST_TYPE_TIME_PROFILE)
90338fd1498Szrj {
90438fd1498Szrj node = cgraph_node::get (hist->fun->decl);
90538fd1498Szrj node->tp_first_run = hist->hvalue.counters[0];
90638fd1498Szrj
90738fd1498Szrj if (dump_file)
90838fd1498Szrj fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
90938fd1498Szrj }
91038fd1498Szrj }
91138fd1498Szrj
91238fd1498Szrj for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
91338fd1498Szrj free (histogram_counts[t]);
91438fd1498Szrj }
91538fd1498Szrj
91638fd1498Szrj /* When passed NULL as file_name, initialize.
91738fd1498Szrj When passed something else, output the necessary commands to change
91838fd1498Szrj line to LINE and offset to FILE_NAME. */
91938fd1498Szrj static void
output_location(char const * file_name,int line,gcov_position_t * offset,basic_block bb)92038fd1498Szrj output_location (char const *file_name, int line,
92138fd1498Szrj gcov_position_t *offset, basic_block bb)
92238fd1498Szrj {
92338fd1498Szrj static char const *prev_file_name;
92438fd1498Szrj static int prev_line;
92538fd1498Szrj bool name_differs, line_differs;
92638fd1498Szrj
92738fd1498Szrj if (!file_name)
92838fd1498Szrj {
92938fd1498Szrj prev_file_name = NULL;
93038fd1498Szrj prev_line = -1;
93138fd1498Szrj return;
93238fd1498Szrj }
93338fd1498Szrj
93438fd1498Szrj name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name);
93538fd1498Szrj line_differs = prev_line != line;
93638fd1498Szrj
93738fd1498Szrj if (!*offset)
93838fd1498Szrj {
93938fd1498Szrj *offset = gcov_write_tag (GCOV_TAG_LINES);
94038fd1498Szrj gcov_write_unsigned (bb->index);
94138fd1498Szrj name_differs = line_differs = true;
94238fd1498Szrj }
94338fd1498Szrj
94438fd1498Szrj /* If this is a new source file, then output the
94538fd1498Szrj file's name to the .bb file. */
94638fd1498Szrj if (name_differs)
94738fd1498Szrj {
94838fd1498Szrj prev_file_name = file_name;
94938fd1498Szrj gcov_write_unsigned (0);
95038fd1498Szrj gcov_write_filename (prev_file_name);
95138fd1498Szrj }
95238fd1498Szrj if (line_differs)
95338fd1498Szrj {
95438fd1498Szrj gcov_write_unsigned (line);
95538fd1498Szrj prev_line = line;
95638fd1498Szrj }
95738fd1498Szrj }
95838fd1498Szrj
95938fd1498Szrj /* Helper for qsort so edges get sorted from highest frequency to smallest.
96038fd1498Szrj This controls the weight for minimal spanning tree algorithm */
96138fd1498Szrj static int
compare_freqs(const void * p1,const void * p2)96238fd1498Szrj compare_freqs (const void *p1, const void *p2)
96338fd1498Szrj {
96438fd1498Szrj const_edge e1 = *(const const_edge *)p1;
96538fd1498Szrj const_edge e2 = *(const const_edge *)p2;
96638fd1498Szrj
96738fd1498Szrj /* Critical edges needs to be split which introduce extra control flow.
96838fd1498Szrj Make them more heavy. */
96938fd1498Szrj int m1 = EDGE_CRITICAL_P (e1) ? 2 : 1;
97038fd1498Szrj int m2 = EDGE_CRITICAL_P (e2) ? 2 : 1;
97138fd1498Szrj
97238fd1498Szrj if (EDGE_FREQUENCY (e1) * m1 + m1 != EDGE_FREQUENCY (e2) * m2 + m2)
97338fd1498Szrj return EDGE_FREQUENCY (e2) * m2 + m2 - EDGE_FREQUENCY (e1) * m1 - m1;
97438fd1498Szrj /* Stabilize sort. */
97538fd1498Szrj if (e1->src->index != e2->src->index)
97638fd1498Szrj return e2->src->index - e1->src->index;
97738fd1498Szrj return e2->dest->index - e1->dest->index;
97838fd1498Szrj }
97938fd1498Szrj
980*58e805e6Szrj /* Only read execution count for thunks. */
981*58e805e6Szrj
982*58e805e6Szrj void
read_thunk_profile(struct cgraph_node * node)983*58e805e6Szrj read_thunk_profile (struct cgraph_node *node)
984*58e805e6Szrj {
985*58e805e6Szrj tree old = current_function_decl;
986*58e805e6Szrj current_function_decl = node->decl;
987*58e805e6Szrj gcov_type *counts = get_coverage_counts (GCOV_COUNTER_ARCS, 1, 0, 0, NULL);
988*58e805e6Szrj if (counts)
989*58e805e6Szrj {
990*58e805e6Szrj node->callees->count = node->count
991*58e805e6Szrj = profile_count::from_gcov_type (counts[0]);
992*58e805e6Szrj free (counts);
993*58e805e6Szrj }
994*58e805e6Szrj current_function_decl = old;
995*58e805e6Szrj return;
996*58e805e6Szrj }
997*58e805e6Szrj
998*58e805e6Szrj
99938fd1498Szrj /* Instrument and/or analyze program behavior based on program the CFG.
100038fd1498Szrj
100138fd1498Szrj This function creates a representation of the control flow graph (of
100238fd1498Szrj the function being compiled) that is suitable for the instrumentation
100338fd1498Szrj of edges and/or converting measured edge counts to counts on the
100438fd1498Szrj complete CFG.
100538fd1498Szrj
100638fd1498Szrj When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
100738fd1498Szrj the flow graph that are needed to reconstruct the dynamic behavior of the
100838fd1498Szrj flow graph. This data is written to the gcno file for gcov.
100938fd1498Szrj
101038fd1498Szrj When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
101138fd1498Szrj information from the gcda file containing edge count information from
101238fd1498Szrj previous executions of the function being compiled. In this case, the
101338fd1498Szrj control flow graph is annotated with actual execution counts by
101438fd1498Szrj compute_branch_probabilities().
101538fd1498Szrj
101638fd1498Szrj Main entry point of this file. */
101738fd1498Szrj
void
branch_prob (bool thunk)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = histogram_values ();
  unsigned cfg_checksum, lineno_checksum;

  total_num_times_called++;

  /* Close the CFG: add fake edges for calls that may abort and for
     noreturn paths, so every block can reach the exit block.  */
  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* For thunks there is no real CFG to massage; skip the abnormal-edge
     rewriting below.  */
  if (!thunk)
    {
      /* We can't handle cyclic regions constructed using abnormal edges.
	 To avoid these we replace every source of abnormal edge by a fake
	 edge from entry node and every destination by fake edge to exit.
	 This keeps graph acyclic and our calculation exact for all normal
	 edges except for exit and entrance ones.

	 We also add fake exit edges for each call and asm statement in the
	 basic, since it may not return.  */

      FOR_EACH_BB_FN (bb, cfun)
	{
	  int need_exit_edge = 0, need_entry_edge = 0;
	  int have_exit_edge = 0, have_entry_edge = 0;
	  edge e;
	  edge_iterator ei;

	  /* Functions returning multiple times are not handled by extra edges.
	     Instead we simply allow negative counts on edges from exit to the
	     block past call and corresponding probabilities.  We can't go
	     with the extra edges because that would result in flowgraph that
	     needs to have fake edges outside the spanning tree.  */

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      gimple_stmt_iterator gsi;
	      gimple *last = NULL;

	      /* It may happen that there are compiler generated statements
		 without a locus at all.  Go through the basic block from the
		 last to the first statement looking for a locus.  */
	      for (gsi = gsi_last_nondebug_bb (bb);
		   !gsi_end_p (gsi);
		   gsi_prev_nondebug (&gsi))
		{
		  last = gsi_stmt (gsi);
		  if (!RESERVED_LOCATION_P (gimple_location (last)))
		    break;
		}

	      /* Edge with goto locus might get wrong coverage info unless
		 it is the only edge out of BB.
		 Don't do that when the locuses match, so
		 if (blah) goto something;
		 is not computed twice.  */
	      if (last
		  && gimple_has_location (last)
		  && !RESERVED_LOCATION_P (e->goto_locus)
		  && !single_succ_p (bb)
		  && (LOCATION_FILE (e->goto_locus)
		      != LOCATION_FILE (gimple_location (last))
		      || (LOCATION_LINE (e->goto_locus)
			  != LOCATION_LINE (gimple_location (last)))))
		{
		  /* Split the edge so the goto locus gets its own block
		     (and thus its own line record).  */
		  basic_block new_bb = split_edge (e);
		  edge ne = single_succ_edge (new_bb);
		  ne->goto_locus = e->goto_locus;
		}
	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
		  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		need_exit_edge = 1;
	      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		have_exit_edge = 1;
	    }
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
		  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		need_entry_edge = 1;
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		have_entry_edge = 1;
	    }

	  if (need_exit_edge && !have_exit_edge)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adding fake exit edge to bb %i\n",
			 bb->index);
	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }
	  if (need_entry_edge && !have_entry_edge)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adding fake entry edge to bb %i\n",
			 bb->index);
	      make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
	      /* Avoid bbs that have both fake entry edge and also some
		 exit edge.  One of those edges wouldn't be added to the
		 spanning tree, but we can't instrument any of them.  */
	      if (have_exit_edge || need_exit_edge)
		{
		  gimple_stmt_iterator gsi;
		  gimple *first;

		  gsi = gsi_start_nondebug_after_labels_bb (bb);
		  gcc_checking_assert (!gsi_end_p (gsi));
		  first = gsi_stmt (gsi);
		  /* Don't split the bbs containing __builtin_setjmp_receiver
		     or ABNORMAL_DISPATCHER calls.  These are very
		     special and don't expect anything to be inserted before
		     them.  */
		  if (is_gimple_call (first)
		      && (gimple_call_builtin_p (first, BUILT_IN_SETJMP_RECEIVER)
			  || (gimple_call_flags (first) & ECF_RETURNS_TWICE)
			  || (gimple_call_internal_p (first)
			      && (gimple_call_internal_fn (first)
				  == IFN_ABNORMAL_DISPATCHER))))
		    continue;

		  if (dump_file)
		    fprintf (dump_file, "Splitting bb %i after labels\n",
			     bb->index);
		  split_block_after_labels (bb);
		}
	    }
	}
    }

  /* Build the edge list, sorted heaviest-first so the spanning tree
     keeps high-frequency edges uninstrumented, and attach per-edge
     profile bookkeeping.  */
  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  qsort (el->index_to_edge, num_edges, sizeof (edge), compare_freqs);
  alloc_aux_for_edges (sizeof (struct edge_profile_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  if (!thunk)
    find_spanning_tree (el);
  else
    {
      edge e;
      edge_iterator ei;
      /* Keep only edge from entry block to be instrumented.  */
      FOR_EACH_BB_FN (bb, cfun)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  EDGE_INFO (e)->ignore = true;
    }


  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_profile_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  /* Update the file-level statistics reported by end_branch_prob.  */
  total_num_blocks += n_basic_blocks_for_fn (cfun);
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks_for_fn (cfun));

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  total_num_edges_instrumented += num_instrumented;
  if (dump_file)
    fprintf (dump_file, "%d instrumentation edges\n", num_instrumented);

  /* Compute two different checksums.  Note that we want to compute
     the checksum in only once place, since it depends on the shape
     of the control flow which can change during
     various transformations.  */
  if (thunk)
    {
      /* At stream in time we do not have CFG, so we can not do checksums.  */
      cfg_checksum = 0;
      lineno_checksum = 0;
    }
  else
    {
      cfg_checksum = coverage_compute_cfg_checksum (cfun);
      lineno_checksum = coverage_compute_lineno_checksum ();
    }

  /* Write the data from which gcov can reconstruct the basic block
     graph and function line numbers (the gcno file).  */
  if (coverage_begin_function (lineno_checksum, cfg_checksum))
    {
      gcov_position_t offset;

      /* Basic block flags */
      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      gcov_write_unsigned (n_basic_blocks_for_fn (cfun));
      gcov_write_length (offset);

      /* Arcs */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (bb->index);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      struct edge_profile_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;
		  /* On trees we don't have fallthru flags, but we can
		     recompute them from CFG shape.  */
		  if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
		      && e->src->next_bb == e->dest)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (e->dest->index);
		  gcov_write_unsigned (flag_bits);
		}
	    }

	  gcov_write_length (offset);
	}

      /* Line numbers.  */
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  gimple_stmt_iterator gsi;
	  gcov_position_t offset = 0;

	  /* The first real block gets the function's own source
	     location so gcov attributes the prologue correctly.  */
	  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
	    {
	      expanded_location curr_location =
		expand_location (DECL_SOURCE_LOCATION (current_function_decl));
	      output_location (curr_location.file, curr_location.line,
			       &offset, bb);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      if (!RESERVED_LOCATION_P (gimple_location (stmt)))
		output_location (gimple_filename (stmt), gimple_lineno (stmt),
				 &offset, bb);
	    }

	  /* Notice GOTO expressions eliminated while constructing the CFG.  */
	  if (single_succ_p (bb)
	      && !RESERVED_LOCATION_P (single_succ_edge (bb)->goto_locus))
	    {
	      expanded_location curr_location
		= expand_location (single_succ_edge (bb)->goto_locus);
	      output_location (curr_location.file, curr_location.line,
			       &offset, bb);
	    }

	  if (offset)
	    {
	      /* A file of NULL indicates the end of run.  */
	      gcov_write_unsigned (0);
	      gcov_write_string (NULL);
	      gcov_write_length (offset);
	    }
	}
    }

  if (flag_profile_values)
    gimple_find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities (cfg_checksum, lineno_checksum);
      if (flag_profile_values)
	compute_value_histograms (values, cfg_checksum, lineno_checksum);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      gimple_init_gcov_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
	instrument_values (values);

      /* Commit changes done by instrumentation.  */
      gsi_commit_edge_inserts ();
    }

  free_aux_for_edges ();

  values.release ();
  free_edge_list (el);
  coverage_end_function (lineno_checksum, cfg_checksum);
  if (flag_branch_probabilities && profile_info)
    {
      struct loop *loop;
      if (dump_file && (dump_flags & TDF_DETAILS))
	report_predictor_hitrates ();
      profile_status_for_fn (cfun) = PROFILE_READ;

      /* At this moment we have precise loop iteration count estimates.
	 Record them to loop structure before the profile gets out of date.  */
      FOR_EACH_LOOP (loop, 0)
	if (loop->header->count > 0)
	  {
	    gcov_type nit = expected_loop_iterations_unbounded (loop);
	    widest_int bound = gcov_type_to_wide_int (nit);
	    loop->any_estimate = false;
	    record_niter_bound (loop, bound, true, false);
	  }
      compute_function_frequency ();
    }
}
139238fd1498Szrj
139338fd1498Szrj /* Union find algorithm implementation for the basic blocks using
139438fd1498Szrj aux fields. */
139538fd1498Szrj
139638fd1498Szrj static basic_block
find_group(basic_block bb)139738fd1498Szrj find_group (basic_block bb)
139838fd1498Szrj {
139938fd1498Szrj basic_block group = bb, bb1;
140038fd1498Szrj
140138fd1498Szrj while ((basic_block) group->aux != group)
140238fd1498Szrj group = (basic_block) group->aux;
140338fd1498Szrj
140438fd1498Szrj /* Compress path. */
140538fd1498Szrj while ((basic_block) bb->aux != group)
140638fd1498Szrj {
140738fd1498Szrj bb1 = (basic_block) bb->aux;
140838fd1498Szrj bb->aux = (void *) group;
140938fd1498Szrj bb = bb1;
141038fd1498Szrj }
141138fd1498Szrj return group;
141238fd1498Szrj }
141338fd1498Szrj
141438fd1498Szrj static void
union_groups(basic_block bb1,basic_block bb2)141538fd1498Szrj union_groups (basic_block bb1, basic_block bb2)
141638fd1498Szrj {
141738fd1498Szrj basic_block bb1g = find_group (bb1);
141838fd1498Szrj basic_block bb2g = find_group (bb2);
141938fd1498Szrj
142038fd1498Szrj /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
142138fd1498Szrj this code is unlikely going to be performance problem anyway. */
142238fd1498Szrj gcc_assert (bb1g != bb2g);
142338fd1498Szrj
142438fd1498Szrj bb1g->aux = bb2g;
142538fd1498Szrj }
142638fd1498Szrj
142738fd1498Szrj /* This function searches all of the edges in the program flow graph, and puts
142838fd1498Szrj as many bad edges as possible onto the spanning tree. Bad edges include
142938fd1498Szrj abnormals edges, which can't be instrumented at the moment. Since it is
143038fd1498Szrj possible for fake edges to form a cycle, we will have to develop some
143138fd1498Szrj better way in the future. Also put critical edges to the tree, since they
143238fd1498Szrj are more expensive to instrument. */
143338fd1498Szrj
143438fd1498Szrj static void
find_spanning_tree(struct edge_list * el)143538fd1498Szrj find_spanning_tree (struct edge_list *el)
143638fd1498Szrj {
143738fd1498Szrj int i;
143838fd1498Szrj int num_edges = NUM_EDGES (el);
143938fd1498Szrj basic_block bb;
144038fd1498Szrj
144138fd1498Szrj /* We use aux field for standard union-find algorithm. */
144238fd1498Szrj FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
144338fd1498Szrj bb->aux = bb;
144438fd1498Szrj
144538fd1498Szrj /* Add fake edge exit to entry we can't instrument. */
144638fd1498Szrj union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
144738fd1498Szrj
144838fd1498Szrj /* First add all abnormal edges to the tree unless they form a cycle. Also
144938fd1498Szrj add all edges to the exit block to avoid inserting profiling code behind
145038fd1498Szrj setting return value from function. */
145138fd1498Szrj for (i = 0; i < num_edges; i++)
145238fd1498Szrj {
145338fd1498Szrj edge e = INDEX_EDGE (el, i);
145438fd1498Szrj if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
145538fd1498Szrj || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
145638fd1498Szrj && !EDGE_INFO (e)->ignore
145738fd1498Szrj && (find_group (e->src) != find_group (e->dest)))
145838fd1498Szrj {
145938fd1498Szrj if (dump_file)
146038fd1498Szrj fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
146138fd1498Szrj e->src->index, e->dest->index);
146238fd1498Szrj EDGE_INFO (e)->on_tree = 1;
146338fd1498Szrj union_groups (e->src, e->dest);
146438fd1498Szrj }
146538fd1498Szrj }
146638fd1498Szrj
146738fd1498Szrj /* And now the rest. Edge list is sorted according to frequencies and
146838fd1498Szrj thus we will produce minimal spanning tree. */
146938fd1498Szrj for (i = 0; i < num_edges; i++)
147038fd1498Szrj {
147138fd1498Szrj edge e = INDEX_EDGE (el, i);
147238fd1498Szrj if (!EDGE_INFO (e)->ignore
147338fd1498Szrj && find_group (e->src) != find_group (e->dest))
147438fd1498Szrj {
147538fd1498Szrj if (dump_file)
147638fd1498Szrj fprintf (dump_file, "Normal edge %d to %d put to tree\n",
147738fd1498Szrj e->src->index, e->dest->index);
147838fd1498Szrj EDGE_INFO (e)->on_tree = 1;
147938fd1498Szrj union_groups (e->src, e->dest);
148038fd1498Szrj }
148138fd1498Szrj }
148238fd1498Szrj
148338fd1498Szrj clear_aux_for_blocks ();
148438fd1498Szrj }
148538fd1498Szrj
148638fd1498Szrj /* Perform file-level initialization for branch-prob processing. */
148738fd1498Szrj
148838fd1498Szrj void
init_branch_prob(void)148938fd1498Szrj init_branch_prob (void)
149038fd1498Szrj {
149138fd1498Szrj int i;
149238fd1498Szrj
149338fd1498Szrj total_num_blocks = 0;
149438fd1498Szrj total_num_edges = 0;
149538fd1498Szrj total_num_edges_ignored = 0;
149638fd1498Szrj total_num_edges_instrumented = 0;
149738fd1498Szrj total_num_blocks_created = 0;
149838fd1498Szrj total_num_passes = 0;
149938fd1498Szrj total_num_times_called = 0;
150038fd1498Szrj total_num_branches = 0;
150138fd1498Szrj for (i = 0; i < 20; i++)
150238fd1498Szrj total_hist_br_prob[i] = 0;
150338fd1498Szrj }
150438fd1498Szrj
150538fd1498Szrj /* Performs file-level cleanup after branch-prob processing
150638fd1498Szrj is completed. */
150738fd1498Szrj
150838fd1498Szrj void
end_branch_prob(void)150938fd1498Szrj end_branch_prob (void)
151038fd1498Szrj {
151138fd1498Szrj if (dump_file)
151238fd1498Szrj {
151338fd1498Szrj fprintf (dump_file, "\n");
151438fd1498Szrj fprintf (dump_file, "Total number of blocks: %d\n",
151538fd1498Szrj total_num_blocks);
151638fd1498Szrj fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
151738fd1498Szrj fprintf (dump_file, "Total number of ignored edges: %d\n",
151838fd1498Szrj total_num_edges_ignored);
151938fd1498Szrj fprintf (dump_file, "Total number of instrumented edges: %d\n",
152038fd1498Szrj total_num_edges_instrumented);
152138fd1498Szrj fprintf (dump_file, "Total number of blocks created: %d\n",
152238fd1498Szrj total_num_blocks_created);
152338fd1498Szrj fprintf (dump_file, "Total number of graph solution passes: %d\n",
152438fd1498Szrj total_num_passes);
152538fd1498Szrj if (total_num_times_called != 0)
152638fd1498Szrj fprintf (dump_file, "Average number of graph solution passes: %d\n",
152738fd1498Szrj (total_num_passes + (total_num_times_called >> 1))
152838fd1498Szrj / total_num_times_called);
152938fd1498Szrj fprintf (dump_file, "Total number of branches: %d\n",
153038fd1498Szrj total_num_branches);
153138fd1498Szrj if (total_num_branches)
153238fd1498Szrj {
153338fd1498Szrj int i;
153438fd1498Szrj
153538fd1498Szrj for (i = 0; i < 10; i++)
153638fd1498Szrj fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
153738fd1498Szrj (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
153838fd1498Szrj / total_num_branches, 5*i, 5*i+5);
153938fd1498Szrj }
154038fd1498Szrj }
154138fd1498Szrj }
1542