1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "pretty-print.h"
26 #include "gcc-rich-location.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "diagnostic-core.h"
30 #include "diagnostic-event-id.h"
31 #include "diagnostic-path.h"
32 #include "alloc-pool.h"
33 #include "fibonacci_heap.h"
34 #include "shortest-paths.h"
35 #include "sbitmap.h"
36 #include "bitmap.h"
37 #include "tristate.h"
38 #include "selftest.h"
39 #include "ordered-hash-map.h"
40 #include "analyzer/analyzer.h"
41 #include "analyzer/analyzer-logging.h"
42 #include "analyzer/sm.h"
43 #include "analyzer/pending-diagnostic.h"
44 #include "analyzer/diagnostic-manager.h"
45 #include "analyzer/region-model.h"
46 #include "analyzer/constraint-manager.h"
47 #include "cfg.h"
48 #include "basic-block.h"
49 #include "gimple.h"
50 #include "gimple-iterator.h"
51 #include "cgraph.h"
52 #include "digraph.h"
53 #include "analyzer/supergraph.h"
54 #include "analyzer/call-string.h"
55 #include "analyzer/program-point.h"
56 #include "analyzer/program-state.h"
57 #include "analyzer/exploded-graph.h"
58 #include "analyzer/checker-path.h"
59 #include "analyzer/reachability.h"
60
61 #if ENABLE_ANALYZER
62
63 namespace ana {
64
65 /* class saved_diagnostic. */
66
67 /* saved_diagnostic's ctor.
68 Take ownership of D and STMT_FINDER. */
69
saved_diagnostic(const state_machine * sm,const exploded_node * enode,const supernode * snode,const gimple * stmt,stmt_finder * stmt_finder,tree var,state_machine::state_t state,pending_diagnostic * d)70 saved_diagnostic::saved_diagnostic (const state_machine *sm,
71 const exploded_node *enode,
72 const supernode *snode, const gimple *stmt,
73 stmt_finder *stmt_finder,
74 tree var, state_machine::state_t state,
75 pending_diagnostic *d)
76 : m_sm (sm), m_enode (enode), m_snode (snode), m_stmt (stmt),
77 /* stmt_finder could be on-stack; we want our own copy that can
78 outlive that. */
79 m_stmt_finder (stmt_finder ? stmt_finder->clone () : NULL),
80 m_var (var), m_state (state),
81 m_d (d), m_trailing_eedge (NULL),
82 m_status (STATUS_NEW), m_epath_length (0), m_problem (NULL)
83 {
/* Either an explicit stmt, or a way of finding one later via the
   stmt_finder, must be supplied so the diagnostic can be located.  */
84 gcc_assert (m_stmt || m_stmt_finder);
85
86 /* We must have an enode in order to be able to look for paths
87 through the exploded_graph to this diagnostic. */
88 gcc_assert (m_enode);
89 }
90
91 /* saved_diagnostic's dtor. */
92
~saved_diagnostic()93 saved_diagnostic::~saved_diagnostic ()
94 {
/* The saved_diagnostic owns all three: the stmt_finder is a clone made
   in the ctor, D's ownership was taken by the ctor, and m_problem is
   set via set_infeasible (if at all).  */
95 delete m_stmt_finder;
96 delete m_d;
97 delete m_problem;
98 }
99
/* Equality, for use by deduplication: two saved_diagnostics compare
   equal if they would lead to the same user-visible diagnostic.
   Fields that don't affect what the user sees (the enode, and the
   stmt_finder) are deliberately excluded.  */
100 bool
operator ==(const saved_diagnostic & other) const101 saved_diagnostic::operator== (const saved_diagnostic &other) const
102 {
103 return (m_sm == other.m_sm
104 /* We don't compare m_enode. */
105 && m_snode == other.m_snode
106 && m_stmt == other.m_stmt
107 /* We don't compare m_stmt_finder. */
108 && pending_diagnostic::same_tree_p (m_var, other.m_var)
109 && m_state == other.m_state
110 && m_d->equal_p (*other.m_d)
111 && m_trailing_eedge == other.m_trailing_eedge);
112 }
113
114 /* State for building a checker_path from a particular exploded_path.
115 In particular, this precomputes reachability information: the set of
116 source enodes for which a path can be found to the diagnostic enode. */
117
118 class path_builder
119 {
120 public:
/* Construct reachability information for the final enode of EPATH
   within EG, so that the significance of edges can be queried later.  */
path_builder(const exploded_graph & eg,const exploded_path & epath)121 path_builder (const exploded_graph &eg,
122 const exploded_path &epath)
123 : m_eg (eg),
124 m_diag_enode (epath.get_final_enode ()),
125 m_reachability (eg, m_diag_enode)
126 {}
127
/* Get the enode at which the diagnostic occurs.  */
get_diag_node() const128 const exploded_node *get_diag_node () const { return m_diag_enode; }
129
/* Return true if the diagnostic enode can be reached from SRC_ENODE.  */
reachable_from_p(const exploded_node * src_enode) const130 bool reachable_from_p (const exploded_node *src_enode) const
131 {
132 return m_reachability.reachable_from_p (src_enode);
133 }
134
get_ext_state() const135 const extrinsic_state &get_ext_state () const { return m_eg.get_ext_state (); }
136
137 private:
138 typedef reachability<eg_traits> enode_reachability;
139
140 const exploded_graph &m_eg;
141
142 /* The enode where the diagnostic occurs. */
143 const exploded_node *m_diag_enode;
144
145 /* Precompute all enodes from which the diagnostic is reachable. */
146 enode_reachability m_reachability;
147 };
148
149 /* class diagnostic_manager. */
150
151 /* diagnostic_manager's ctor. */
152
/* VERBOSITY controls how detailed the emitted paths are; see its use
   in add_events_for_superedge (levels below 3 omit insignificant
   edges).  */
diagnostic_manager(logger * logger,int verbosity)153 diagnostic_manager::diagnostic_manager (logger *logger, int verbosity)
154 : log_user (logger), m_verbosity (verbosity)
155 {
156 }
157
158 /* Queue pending_diagnostic D at ENODE for later emission. */
159
160 void
add_diagnostic(const state_machine * sm,const exploded_node * enode,const supernode * snode,const gimple * stmt,stmt_finder * finder,tree var,state_machine::state_t state,pending_diagnostic * d)161 diagnostic_manager::add_diagnostic (const state_machine *sm,
162 const exploded_node *enode,
163 const supernode *snode, const gimple *stmt,
164 stmt_finder *finder,
165 tree var, state_machine::state_t state,
166 pending_diagnostic *d)
167 {
168 LOG_FUNC (get_logger ());
169
170 /* We must have an enode in order to be able to look for paths
171 through the exploded_graph to the diagnostic. */
172 gcc_assert (enode);
173
/* The saved_diagnostic ctor takes ownership of D and clones FINDER;
   the resulting record is held in m_saved_diagnostics for batch
   emission by emit_saved_diagnostics.  */
174 saved_diagnostic *sd
175 = new saved_diagnostic (sm, enode, snode, stmt, finder, var, state, d);
176 m_saved_diagnostics.safe_push (sd);
177 if (get_logger ())
178 log ("adding saved diagnostic %i at SN %i: %qs",
179 m_saved_diagnostics.length () - 1,
180 snode->m_index, d->get_kind ());
181 }
182
183 /* Queue pending_diagnostic D at ENODE for later emission. */
184
185 void
add_diagnostic(const exploded_node * enode,const supernode * snode,const gimple * stmt,stmt_finder * finder,pending_diagnostic * d)186 diagnostic_manager::add_diagnostic (const exploded_node *enode,
187 const supernode *snode, const gimple *stmt,
188 stmt_finder *finder,
189 pending_diagnostic *d)
190 {
191 gcc_assert (enode);
/* Convenience overload for diagnostics not associated with any state
   machine: delegate with no sm, no var, and state 0.  */
192 add_diagnostic (NULL, enode, snode, stmt, finder, NULL_TREE, 0, d);
193 }
194
195 /* A class for identifying sets of duplicated pending_diagnostic.
196
197 We want to find the simplest dedupe_candidate amongst those that share a
198 dedupe_key. */
199
200 class dedupe_key
201 {
202 public:
dedupe_key(const saved_diagnostic & sd,const exploded_path & epath)203 dedupe_key (const saved_diagnostic &sd,
204 const exploded_path &epath)
205 : m_sd (sd), m_stmt (sd.m_stmt)
206 {
207 /* Support deferring the choice of stmt until after an emission path has
208 been built, using an optional stmt_finder. */
209 if (m_stmt == NULL)
210 {
211 gcc_assert (sd.m_stmt_finder);
212 m_stmt = sd.m_stmt_finder->find_stmt (epath);
213 }
214 gcc_assert (m_stmt);
215 }
216
/* Hash on the stmt only; keys hashing to the same bucket are
   distinguished by operator== below (which also compares the
   saved_diagnostic).  */
hash() const217 hashval_t hash () const
218 {
219 inchash::hash hstate;
220 hstate.add_ptr (m_stmt);
221 // TODO: m_sd
222 return hstate.end ();
223 }
operator ==(const dedupe_key & other) const224 bool operator== (const dedupe_key &other) const
225 {
226 return (m_sd == other.m_sd
227 && m_stmt == other.m_stmt);
228 }
229
get_location() const230 location_t get_location () const
231 {
232 return m_stmt->location;
233 }
234
235 /* A qsort comparator for use by dedupe_winners::emit_best
236 to sort them into location_t order. */
237
238 static int
comparator(const void * p1,const void * p2)239 comparator (const void *p1, const void *p2)
240 {
241 const dedupe_key *pk1 = *(const dedupe_key * const *)p1;
242 const dedupe_key *pk2 = *(const dedupe_key * const *)p2;
243
244 location_t loc1 = pk1->get_location ();
245 location_t loc2 = pk2->get_location ();
246
/* NOTE: the arguments are deliberately (loc2, loc1); given
   linemap_compare_locations's convention this yields source-location
   order for the qsort.  */
247 return linemap_compare_locations (line_table, loc2, loc1);
248 }
249
250 const saved_diagnostic &m_sd;
251 const gimple *m_stmt;
252 };
253
254 /* The value of a slot for a dedupe_key within dedupe_winners:
255 the exploded_path for the best candidate for that key, and the
256 number of duplicates seen so far. */
257
258 class dedupe_candidate
259 {
260 public:
261 // has the exploded_path
/* Build the candidate for SD by extracting the shortest exploded_path
   to SD's enode from the precomputed shortest-paths info SP.  */
dedupe_candidate(const shortest_exploded_paths & sp,saved_diagnostic * sd)262 dedupe_candidate (const shortest_exploded_paths &sp,
263 saved_diagnostic *sd)
264 : m_epath (sp.get_shortest_path (sd->m_enode)),
265 m_num_dupes (0)
266 {
267 }
268
/* Number of edges in the candidate's path; used by dedupe_winners::add
   to prefer shorter paths.  */
length() const269 unsigned length () const { return m_epath.length (); }
get_path() const270 const exploded_path &get_path () const { return m_epath; }
271
add_duplicate()272 void add_duplicate () { m_num_dupes++; }
get_num_dupes() const273 int get_num_dupes () const { return m_num_dupes; }
274
275 private:
276 exploded_path m_epath;
277 public:
278 int m_num_dupes;
279 };
280
281 /* Traits for use by dedupe_winners. */
282
283 class dedupe_hash_map_traits
284 {
285 public:
286 typedef const dedupe_key *key_type;
287 typedef dedupe_candidate *value_type;
288 typedef dedupe_candidate *compare_type;
289
hash(const key_type & v)290 static inline hashval_t hash (const key_type &v)
291 {
292 return v->hash ();
293 }
/* Compare by value, not by pointer identity, so that equivalent keys
   constructed separately dedupe together.  */
equal_keys(const key_type & k1,const key_type & k2)294 static inline bool equal_keys (const key_type &k1, const key_type &k2)
295 {
296 return *k1 == *k2;
297 }
298 template <typename T>
remove(T &)299 static inline void remove (T &)
300 {
301 // TODO
302 }
/* The key pointer value 1 marks deleted slots; NULL marks empty slots
   (hence empty_zero_p below).  Real keys are heap pointers and can
   never collide with either sentinel.  */
303 template <typename T>
mark_deleted(T & entry)304 static inline void mark_deleted (T &entry)
305 {
306 entry.m_key = reinterpret_cast<key_type> (1);
307 }
308 template <typename T>
mark_empty(T & entry)309 static inline void mark_empty (T &entry)
310 {
311 entry.m_key = NULL;
312 }
313 template <typename T>
is_deleted(const T & entry)314 static inline bool is_deleted (const T &entry)
315 {
316 return entry.m_key == reinterpret_cast<key_type> (1);
317 }
318 template <typename T>
is_empty(const T & entry)319 static inline bool is_empty (const T &entry)
320 {
321 return entry.m_key == NULL;
322 }
323 static const bool empty_zero_p = true;
324 };
325
326 /* A class for deduplicating diagnostics and finding (and emitting) the
327 best diagnostic within each partition. */
328
329 class dedupe_winners
330 {
331 public:
~dedupe_winners()332 ~dedupe_winners ()
333 {
334 /* Delete all keys and candidates. */
335 for (map_t::iterator iter = m_map.begin ();
336 iter != m_map.end ();
337 ++iter)
338 {
339 delete (*iter).first;
340 delete (*iter).second;
341 }
342 }
343
344 /* Determine an exploded_path for SD using SP and, if it's feasible,
345 determine if it's the best seen so far for its dedupe_key.
346 Retain the winner for each dedupe_key, and discard the rest. */
347
add(logger * logger,const shortest_exploded_paths & sp,saved_diagnostic * sd)348 void add (logger *logger,
349 const shortest_exploded_paths &sp,
350 saved_diagnostic *sd)
351 {
352 /* Build a dedupe_candidate for SD.
353 This uses SP to build an exploded_path. */
354 dedupe_candidate *dc = new dedupe_candidate (sp, sd);
355
356 sd->set_epath_length (dc->length ());
357
358 /* Verify that the epath is feasible.
359 State-merging means that not every path in the epath corresponds
360 to a feasible one w.r.t. states.
361 Here we simply check each duplicate saved_diagnostic's
362 shortest_path, and reject any that aren't feasible.
363 This could introduce false negatives, as there could be longer
364 feasible paths within the egraph. */
365 if (logger)
366 logger->log ("considering %qs at EN: %i, SN: %i",
367 sd->m_d->get_kind (), sd->m_enode->m_index,
368 sd->m_snode->m_index);
369
370 feasibility_problem *p = NULL;
371 if (!dc->get_path ().feasible_p (logger, &p))
372 {
373 if (logger)
374 logger->log ("rejecting %qs at EN: %i, SN: %i"
375 " due to infeasible path",
376 sd->m_d->get_kind (), sd->m_enode->m_index,
377 sd->m_snode->m_index);
/* SD takes ownership of the feasibility_problem P here.  */
378 sd->set_infeasible (p);
379 delete dc;
380 return;
381 }
382 else
383 if (logger)
384 logger->log ("accepting %qs at EN: %i, SN: %i with feasible path",
385 sd->m_d->get_kind (), sd->m_enode->m_index,
386 sd->m_snode->m_index);
387
388 sd->set_feasible ();
389
390 dedupe_key *key = new dedupe_key (*sd, dc->get_path ());
391 if (dedupe_candidate **slot = m_map.get (key))
392 {
393 if (logger)
394 logger->log ("already have this dedupe_key");
395
396 (*slot)->add_duplicate ();
397
398 if (dc->length () < (*slot)->length ())
399 {
400 /* We've got a shorter path for the key; replace
401 the current candidate. */
402 if (logger)
403 logger->log ("length %i is better than existing length %i;"
404 " taking over this dedupe_key",
405 dc->length (), (*slot)->length ());
/* Carry over the duplicate count accumulated by the loser.  */
406 dc->m_num_dupes = (*slot)->get_num_dupes ();
407 delete *slot;
408 *slot = dc;
409 }
410 else
411 /* We haven't beaten the current best candidate;
412 drop the new candidate. */
413 {
414 if (logger)
415 logger->log ("length %i isn't better than existing length %i;"
416 " dropping this candidate",
417 dc->length (), (*slot)->length ());
418 delete dc;
419 }
/* The map retains its existing key; this freshly-built equal key is
   no longer needed in either branch above.  */
420 delete key;
421 }
422 else
423 {
424 /* This is the first candidate for this key. */
/* The map takes ownership of both KEY and DC (freed in the dtor).  */
425 m_map.put (key, dc);
426 if (logger)
427 logger->log ("first candidate for this dedupe_key");
428 }
429 }
430
431 /* Emit the simplest diagnostic within each set. */
432
emit_best(diagnostic_manager * dm,const exploded_graph & eg)433 void emit_best (diagnostic_manager *dm,
434 const exploded_graph &eg)
435 {
436 LOG_SCOPE (dm->get_logger ());
437
438 /* Get keys into a vec for sorting. */
439 auto_vec<const dedupe_key *> keys (m_map.elements ());
440 for (map_t::iterator iter = m_map.begin ();
441 iter != m_map.end ();
442 ++iter)
443 keys.quick_push ((*iter).first);
444
445 dm->log ("# keys after de-duplication: %i", keys.length ());
446
447 /* Sort into a good emission order. */
448 keys.qsort (dedupe_key::comparator);
449
450 /* Emit the best candidate for each key. */
451 int i;
452 const dedupe_key *key;
453 FOR_EACH_VEC_ELT (keys, i, key)
454 {
455 dedupe_candidate **slot = m_map.get (key);
456 gcc_assert (*slot);
457 const dedupe_candidate &dc = **slot;
458
459 dm->emit_saved_diagnostic (eg, key->m_sd,
460 dc.get_path (), key->m_stmt,
461 dc.get_num_dupes ());
462 }
463 }
464
465 private:
466
467 /* This maps from each dedupe_key to a current best dedupe_candidate. */
468
469 typedef hash_map<const dedupe_key *, dedupe_candidate *,
470 dedupe_hash_map_traits> map_t;
471 map_t m_map;
472 };
473
474 /* Emit all saved diagnostics. */
475
476 void
emit_saved_diagnostics(const exploded_graph & eg)477 diagnostic_manager::emit_saved_diagnostics (const exploded_graph &eg)
478 {
479 LOG_SCOPE (get_logger ());
480 auto_timevar tv (TV_ANALYZER_DIAGNOSTICS);
481 log ("# saved diagnostics: %i", m_saved_diagnostics.length ());
482 if (get_logger ())
483 {
484 unsigned i;
485 saved_diagnostic *sd;
486 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
487 log ("[%i] sd: %qs at EN: %i, SN: %i",
488 i, sd->m_d->get_kind (), sd->m_enode->m_index,
489 sd->m_snode->m_index);
490 }
491
492 if (m_saved_diagnostics.length () == 0)
493 return;
494
495 /* Compute the shortest_paths once, sharing it between all diagnostics. */
496 shortest_exploded_paths sp (eg, eg.get_origin ());
497
498 /* Iterate through all saved diagnostics, adding them to a dedupe_winners
499 instance. This partitions the saved diagnostics by dedupe_key,
500 generating exploded_paths for them, and retaining the best one in each
501 partition. */
502 dedupe_winners best_candidates;
503
504 int i;
505 saved_diagnostic *sd;
506 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
507 best_candidates.add (get_logger (), sp, sd);
508
509 /* For each dedupe-key, call emit_saved_diagnostic on the "best"
510 saved_diagnostic. */
511 best_candidates.emit_best (this, eg);
512 }
513
514 /* Given a saved_diagnostic SD at STMT with feasible path EPATH through EG,
515 create a checker_path of suitable events and use it to call
516 SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic. */
517
518 void
emit_saved_diagnostic(const exploded_graph & eg,const saved_diagnostic & sd,const exploded_path & epath,const gimple * stmt,int num_dupes)519 diagnostic_manager::emit_saved_diagnostic (const exploded_graph &eg,
520 const saved_diagnostic &sd,
521 const exploded_path &epath,
522 const gimple *stmt,
523 int num_dupes)
524 {
525 LOG_SCOPE (get_logger ());
526 log ("sd: %qs at SN: %i", sd.m_d->get_kind (), sd.m_snode->m_index);
527 log ("num dupes: %i", num_dupes);
528
529 pretty_printer *pp = global_dc->printer->clone ();
530
531 /* Precompute all enodes from which the diagnostic is reachable. */
532 path_builder pb (eg, epath);
533
534 /* This is the diagnostic_path subclass that will be built for
535 the diagnostic. */
536 checker_path emission_path;
537
538 /* Populate emission_path with a full description of EPATH. */
539 build_emission_path (pb, epath, &emission_path);
540
541 /* Now prune it to just cover the most pertinent events. */
542 prune_path (&emission_path, sd.m_sm, sd.m_var, sd.m_state);
543
544 /* Add a final event to the path, covering the diagnostic itself.
545 We use the final enode from the epath, which might be different from
546 the sd.m_enode, as the dedupe code doesn't care about enodes, just
547 snodes. */
548 emission_path.add_final_event (sd.m_sm, epath.get_final_enode (), stmt,
549 sd.m_var, sd.m_state);
550
551 /* The "final" event might not be final; if the saved_diagnostic has a
552 trailing eedge stashed, add any events for it. This is for use
553 in handling longjmp, to show where a longjmp is rewinding to. */
554 if (sd.m_trailing_eedge)
555 add_events_for_eedge (pb, *sd.m_trailing_eedge, &emission_path);
556
557 emission_path.prepare_for_emission (sd.m_d);
558
559 gcc_rich_location rich_loc (stmt->location);
560 rich_loc.set_path (&emission_path);
561
562 auto_diagnostic_group d;
563 auto_cfun sentinel (sd.m_snode->m_fun);
564 if (sd.m_d->emit (&rich_loc))
565 {
566 if (flag_analyzer_show_duplicate_count && num_dupes > 0)
567 inform_n (stmt->location, num_dupes,
568 "%i duplicate", "%i duplicates",
569 num_dupes);
570 }
571 delete pp;
572 }
573
574 /* Given a state change to DST_REP, determine a tree that gives the origin
575 of that state at STMT, using DST_STATE's region model, so that state
576 changes based on assignments can be tracked back to their origins.
577
578 For example, if we have
579
580 (S1) _1 = malloc (64);
581 (S2) EXPR = _1;
582
583 then at stmt S2 we can get the origin of EXPR's state as being _1,
584 and thus track the allocation back to S1. */
585
586 static tree
get_any_origin(const gimple * stmt,tree dst_rep,const program_state & dst_state)587 get_any_origin (const gimple *stmt,
588 tree dst_rep,
589 const program_state &dst_state)
590 {
591 if (!stmt)
592 return NULL_TREE;
593
594 gcc_assert (dst_rep);
595
596 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
597 {
598 tree lhs = gimple_assign_lhs (assign);
599 /* Use region IDs to compare lhs with DST_REP, bulletproofing against
600 cases where they can't have lvalues by using
601 tentative_region_model_context. */
602 tentative_region_model_context ctxt;
603 region_id lhs_rid = dst_state.m_region_model->get_lvalue (lhs, &ctxt);
604 region_id dst_rep_rid
605 = dst_state.m_region_model->get_lvalue (dst_rep, &ctxt);
606 if (lhs_rid == dst_rep_rid && !ctxt.had_errors_p ())
607 {
608 tree rhs1 = gimple_assign_rhs1 (assign);
609 enum tree_code op = gimple_assign_rhs_code (assign);
/* Only simple copies (SSA_NAME) and field accesses (COMPONENT_REF)
   are traced back to an origin; all other rhs codes yield no origin
   (falling through to return NULL_TREE).  */
610 switch (op)
611 {
612 default:
613 //gcc_unreachable (); // TODO
614 break;
615 case COMPONENT_REF:
616 case SSA_NAME:
617 return rhs1;
618 }
619 }
620 }
621 return NULL_TREE;
622 }
623
624 /* Emit a "path" of events to EMISSION_PATH describing the exploded path
625 EPATH within EG. */
626
627 void
build_emission_path(const path_builder & pb,const exploded_path & epath,checker_path * emission_path) const628 diagnostic_manager::build_emission_path (const path_builder &pb,
629 const exploded_path &epath,
630 checker_path *emission_path) const
631 {
632 LOG_SCOPE (get_logger ());
/* Walk EPATH's edges in order, delegating per-edge event creation to
   add_events_for_eedge.  */
633 for (unsigned i = 0; i < epath.m_edges.length (); i++)
634 {
635 const exploded_edge *eedge = epath.m_edges[i];
636 add_events_for_eedge (pb, *eedge, emission_path);
637 }
638 }
639
640 /* Subclass of state_change_visitor that creates state_change_event
641 instances. */
642
643 class state_change_event_creator : public state_change_visitor
644 {
645 public:
state_change_event_creator(const exploded_edge & eedge,checker_path * emission_path)646 state_change_event_creator (const exploded_edge &eedge,
647 checker_path *emission_path)
648 : m_eedge (eedge),
649 m_emission_path (emission_path)
650 {}
651
/* Append a state_change_event for a global (per-checker, non-var)
   state change; NULL_TREE is used for the var and origin since no
   specific tree is involved.  Returns false to continue iteration.  */
on_global_state_change(const state_machine & sm,state_machine::state_t src_sm_val,state_machine::state_t dst_sm_val)652 bool on_global_state_change (const state_machine &sm,
653 state_machine::state_t src_sm_val,
654 state_machine::state_t dst_sm_val)
655 FINAL OVERRIDE
656 {
657 const exploded_node *src_node = m_eedge.m_src;
658 const program_point &src_point = src_node->get_point ();
659 const int src_stack_depth = src_point.get_stack_depth ();
660 const exploded_node *dst_node = m_eedge.m_dest;
661 const gimple *stmt = src_point.get_stmt ();
662 const supernode *supernode = src_point.get_supernode ();
663 const program_state &dst_state = dst_node->get_state ();
664
665 int stack_depth = src_stack_depth;
666
667 m_emission_path->add_event (new state_change_event (supernode,
668 stmt,
669 stack_depth,
670 sm,
671 NULL_TREE,
672 src_sm_val,
673 dst_sm_val,
674 NULL_TREE,
675 dst_state));
676 return false;
677 }
678
/* Append a state_change_event for a change to the sm-state of DST_REP.
   Returns false to continue iteration over state changes.  */
on_state_change(const state_machine & sm,state_machine::state_t src_sm_val,state_machine::state_t dst_sm_val,tree dst_rep,svalue_id dst_origin_sid)679 bool on_state_change (const state_machine &sm,
680 state_machine::state_t src_sm_val,
681 state_machine::state_t dst_sm_val,
682 tree dst_rep,
683 svalue_id dst_origin_sid) FINAL OVERRIDE
684 {
685 const exploded_node *src_node = m_eedge.m_src;
686 const program_point &src_point = src_node->get_point ();
687 const int src_stack_depth = src_point.get_stack_depth ();
688 const exploded_node *dst_node = m_eedge.m_dest;
689 const gimple *stmt = src_point.get_stmt ();
690 const supernode *supernode = src_point.get_supernode ();
691 const program_state &dst_state = dst_node->get_state ();
692
693 int stack_depth = src_stack_depth;
694
/* For CFG edges, attribute the change to the last stmt of the source
   supernode (there is no stmt at the edge itself).  */
695 if (m_eedge.m_sedge
696 && m_eedge.m_sedge->m_kind == SUPEREDGE_CFG_EDGE)
697 {
698 supernode = src_point.get_supernode ();
699 stmt = supernode->get_last_stmt ();
700 stack_depth = src_stack_depth;
701 }
702
703 /* Bulletproofing for state changes at calls/returns;
704 TODO: is there a better way? */
705 if (!stmt)
706 return false;
707
/* Prefer the origin recorded in the state; fall back to scanning the
   stmt (e.g. assignments) for one.  */
708 tree origin_rep
709 = dst_state.get_representative_tree (dst_origin_sid);
710
711 if (origin_rep == NULL_TREE)
712 origin_rep = get_any_origin (stmt, dst_rep, dst_state);
713 m_emission_path->add_event (new state_change_event (supernode,
714 stmt,
715 stack_depth,
716 sm,
717 dst_rep,
718 src_sm_val,
719 dst_sm_val,
720 origin_rep,
721 dst_state));
722 return false;
723 }
724
725 const exploded_edge &m_eedge;
726 checker_path *m_emission_path;
727 };
728
729 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
730 VISITOR's on_state_change for every sm-state change that occurs
731 to a tree, and on_global_state_change for every global state change
732 that occurs.
733
734 This determines the state changes that ought to be reported to
735 the user: a combination of the effects of changes to sm_state_map
736 (which maps svalues to sm-states), and of region_model changes
737 (which map trees to svalues).
738
739 Bail out early and return true if any call to on_global_state_change
740 or on_state_change returns true, otherwise return false.
741
742 This is split out to make it easier to experiment with changes to
743 exploded_node granularity (so that we can observe what state changes
744 lead to state_change_events being emitted). */
745
746 bool
for_each_state_change(const program_state & src_state,const program_state & dst_state,const extrinsic_state & ext_state,state_change_visitor * visitor)747 for_each_state_change (const program_state &src_state,
748 const program_state &dst_state,
749 const extrinsic_state &ext_state,
750 state_change_visitor *visitor)
751 {
752 gcc_assert (src_state.m_checker_states.length ()
753 == ext_state.get_num_checkers ())
754 gcc_assert (dst_state.m_checker_states.length ()
755 == ext_state.get_num_checkers ());
756 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
757 {
758 const state_machine &sm = ext_state.get_sm (i);
759 const sm_state_map &src_smap = *src_state.m_checker_states[i];
760 const sm_state_map &dst_smap = *dst_state.m_checker_states[i];
761
762 /* Add events for any global state changes. */
763 if (src_smap.get_global_state () != dst_smap.get_global_state ())
764 if (visitor->on_global_state_change (sm,
765 src_smap.get_global_state (),
766 dst_smap.get_global_state ()))
767 return true;
768
769 /* Add events for per-svalue state changes. */
770 for (sm_state_map::iterator_t iter = dst_smap.begin ();
771 iter != dst_smap.end ();
772 ++iter)
773 {
774 /* Ideally we'd directly compare the SM state between src state
775 and dst state, but there's no guarantee that the IDs can
776 be meaningfully compared. */
777 svalue_id dst_sid = (*iter).first;
778 state_machine::state_t dst_sm_val = (*iter).second.m_state;
779
780 auto_vec<path_var> dst_pvs;
781 dst_state.m_region_model->get_path_vars_for_svalue (dst_sid,
782 &dst_pvs);
783
784 unsigned j;
785 path_var *dst_pv;
786 FOR_EACH_VEC_ELT (dst_pvs, j, dst_pv)
787 {
788 tree dst_rep = dst_pv->m_tree;
789 gcc_assert (dst_rep);
/* Skip path_vars in frames beyond the src state's stack, since
   they have no counterpart to compare against there.  */
790 if (dst_pv->m_stack_depth
791 >= src_state.m_region_model->get_stack_depth ())
792 continue;
/* Use a tentative context so that failure to get an rvalue in the
   src state is reported via had_errors_p rather than as a hard
   error; such vars are simply skipped.  */
793 tentative_region_model_context ctxt;
794 svalue_id src_sid
795 = src_state.m_region_model->get_rvalue (*dst_pv, &ctxt);
796 if (src_sid.null_p () || ctxt.had_errors_p ())
797 continue;
798 state_machine::state_t src_sm_val = src_smap.get_state (src_sid);
799 if (dst_sm_val != src_sm_val)
800 {
801 svalue_id dst_origin_sid = (*iter).second.m_origin;
802 if (visitor->on_state_change (sm, src_sm_val, dst_sm_val,
803 dst_rep, dst_origin_sid))
804 return true;
805 }
806 }
807 }
808 }
809 return false;
810 }
811
812 /* Subroutine of diagnostic_manager::build_emission_path.
813 Add any events for EEDGE to EMISSION_PATH. */
814
815 void
add_events_for_eedge(const path_builder & pb,const exploded_edge & eedge,checker_path * emission_path) const816 diagnostic_manager::add_events_for_eedge (const path_builder &pb,
817 const exploded_edge &eedge,
818 checker_path *emission_path) const
819 {
820 const exploded_node *src_node = eedge.m_src;
821 const program_point &src_point = src_node->get_point ();
822 const exploded_node *dst_node = eedge.m_dest;
823 const program_point &dst_point = dst_node->get_point ();
824 const int dst_stack_depth = dst_point.get_stack_depth ();
825 if (get_logger ())
826 {
827 get_logger ()->start_log_line ();
828 pretty_printer *pp = get_logger ()->get_printer ();
829 pp_printf (pp, "EN %i -> EN %i: ",
830 eedge.m_src->m_index,
831 eedge.m_dest->m_index);
832 src_point.print (pp, format (false));
833 pp_string (pp, "-> ");
834 dst_point.print (pp, format (false));
835 get_logger ()->end_log_line ();
836 }
837 const program_state &src_state = src_node->get_state ();
838 const program_state &dst_state = dst_node->get_state ();
839
840 /* Add state change events for the states that have changed.
841 We add these before events for superedges, so that if we have a
842 state_change_event due to following an edge, we'll get this sequence
843 of events:
844
845 | if (!ptr)
846 | ~
847 | |
848 | (1) assuming 'ptr' is non-NULL (state_change_event)
849 | (2) following 'false' branch... (start_cfg_edge_event)
850 ...
851 | do_something (ptr);
852 | ~~~~~~~~~~~~~^~~~~
853 | |
854 | (3) ...to here (end_cfg_edge_event). */
855 state_change_event_creator visitor (eedge, emission_path);
856 for_each_state_change (src_state, dst_state, pb.get_ext_state (),
857 &visitor);
858
859 /* Allow non-standard edges to add events, e.g. when rewinding from
860 longjmp to a setjmp. */
861 if (eedge.m_custom_info)
862 eedge.m_custom_info->add_events_to_path (emission_path, eedge);
863
864 /* Add events for superedges, function entries, and for statements. */
865 switch (dst_point.get_kind ())
866 {
867 default:
868 break;
869 case PK_BEFORE_SUPERNODE:
870 if (src_point.get_kind () == PK_AFTER_SUPERNODE)
871 {
872 if (eedge.m_sedge)
873 add_events_for_superedge (pb, eedge, emission_path);
874 }
875 /* Add function entry events. */
876 if (dst_point.get_supernode ()->entry_p ())
877 {
878 emission_path->add_event
879 (new function_entry_event
880 (dst_point.get_supernode ()->get_start_location (),
881 dst_point.get_fndecl (),
882 dst_stack_depth));
883 }
884 break;
885 case PK_BEFORE_STMT:
886 {
/* setjmp-like calls get a dedicated event subclass, so that a later
   rewind can refer back to them; other stmts get a generic
   statement_event.  */
887 const gimple *stmt = dst_point.get_stmt ();
888 const gcall *call = dyn_cast <const gcall *> (stmt);
889 if (call && is_setjmp_call_p (call))
890 emission_path->add_event
891 (new setjmp_event (stmt->location,
892 dst_node,
893 dst_point.get_fndecl (),
894 dst_stack_depth,
895 call));
896 else
897 emission_path->add_event
898 (new statement_event (stmt,
899 dst_point.get_fndecl (),
900 dst_stack_depth, dst_state));
901 }
902 break;
903 }
904 }
905
906 /* Return true if EEDGE is a significant edge in the path to the diagnostic
907 for PB.
908
909 Consider all of the sibling out-eedges from the same source enode
910 as EEDGE.
911 If there's no path from the destinations of those eedges to the
912 diagnostic enode, then we have to take this eedge and thus it's
913 significant.
914
915 Conversely if there is a path from the destination of any other sibling
916 eedge to the diagnostic enode, then this edge is insignificant.
917
918 Example 1: redundant if-else:
919
920 (A) if (...) A
921 (B) ... / \
922 else B C
923 (C) ... \ /
924 (D) [DIAGNOSTIC] D
925
926 D is reachable by either B or C, so neither of these edges
927 are significant.
928
929 Example 2: pertinent if-else:
930
931 (A) if (...) A
932 (B) ... / \
933 else B C
934 (C) [NECESSARY CONDITION] | |
935 (D) [POSSIBLE DIAGNOSTIC] D1 D2
936
937 D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
938 at D2. D2 is only reachable via C, so the A -> C edge is significant.
939
940 Example 3: redundant loop:
941
942 (A) while (...) +-->A
943 (B) ... | / \
944 (C) ... +-B C
945 (D) [DIAGNOSTIC] |
946 D
947
948 D is reachable from both B and C, so the A->C edge is not significant. */
949
950 bool
significant_edge_p(const path_builder & pb,const exploded_edge & eedge) const951 diagnostic_manager::significant_edge_p (const path_builder &pb,
952 const exploded_edge &eedge) const
953 {
954 int i;
955 exploded_edge *sibling;
/* EEDGE is insignificant if any sibling out-edge of its source enode
   also reaches the diagnostic enode (see the worked examples in the
   comment preceding this function).  */
956 FOR_EACH_VEC_ELT (eedge.m_src->m_succs, i, sibling)
957 {
958 if (sibling == &eedge)
959 continue;
960 if (pb.reachable_from_p (sibling->m_dest))
961 {
962 if (get_logger ())
963 get_logger ()->log (" edge EN: %i -> EN: %i is insignificant as"
964 " EN: %i is also reachable via"
965 " EN: %i -> EN: %i",
966 eedge.m_src->m_index, eedge.m_dest->m_index,
967 pb.get_diag_node ()->m_index,
968 sibling->m_src->m_index,
969 sibling->m_dest->m_index);
970 return false;
971 }
972 }
973
974 return true;
975 }
976
977 /* Subroutine of diagnostic_manager::add_events_for_eedge
978 where EEDGE has an underlying superedge i.e. a CFG edge,
979 or an interprocedural call/return.
980 Add any events for the superedge to EMISSION_PATH. */
981
982 void
add_events_for_superedge(const path_builder & pb,const exploded_edge & eedge,checker_path * emission_path) const983 diagnostic_manager::add_events_for_superedge (const path_builder &pb,
984 const exploded_edge &eedge,
985 checker_path *emission_path)
986 const
987 {
988 gcc_assert (eedge.m_sedge);
989
990 /* Don't add events for insignificant edges at verbosity levels below 3. */
991 if (m_verbosity < 3)
992 if (!significant_edge_p (pb, eedge))
993 return;
994
995 const exploded_node *src_node = eedge.m_src;
996 const program_point &src_point = src_node->get_point ();
997 const exploded_node *dst_node = eedge.m_dest;
998 const program_point &dst_point = dst_node->get_point ();
999 const int src_stack_depth = src_point.get_stack_depth ();
1000 const int dst_stack_depth = dst_point.get_stack_depth ();
1001 const gimple *last_stmt = src_point.get_supernode ()->get_last_stmt ();
1002
1003 switch (eedge.m_sedge->m_kind)
1004 {
1005 case SUPEREDGE_CFG_EDGE:
1006 {
1007 emission_path->add_event
1008 (new start_cfg_edge_event (eedge,
1009 (last_stmt
1010 ? last_stmt->location
1011 : UNKNOWN_LOCATION),
1012 src_point.get_fndecl (),
1013 src_stack_depth));
1014 emission_path->add_event
1015 (new end_cfg_edge_event (eedge,
1016 dst_point.get_supernode ()->get_start_location (),
1017 dst_point.get_fndecl (),
1018 dst_stack_depth));
1019 }
1020 break;
1021
1022 case SUPEREDGE_CALL:
1023 {
1024 emission_path->add_event
1025 (new call_event (eedge,
1026 (last_stmt
1027 ? last_stmt->location
1028 : UNKNOWN_LOCATION),
1029 src_point.get_fndecl (),
1030 src_stack_depth));
1031 }
1032 break;
1033
1034 case SUPEREDGE_INTRAPROCEDURAL_CALL:
1035 {
1036 /* TODO: add a subclass for this, or generate events for the
1037 summary. */
1038 emission_path->add_event
1039 (new debug_event ((last_stmt
1040 ? last_stmt->location
1041 : UNKNOWN_LOCATION),
1042 src_point.get_fndecl (),
1043 src_stack_depth,
1044 "call summary"));
1045 }
1046 break;
1047
1048 case SUPEREDGE_RETURN:
1049 {
1050 const return_superedge *return_edge
1051 = as_a <const return_superedge *> (eedge.m_sedge);
1052
1053 const gcall *call_stmt = return_edge->get_call_stmt ();
1054 emission_path->add_event
1055 (new return_event (eedge,
1056 (call_stmt
1057 ? call_stmt->location
1058 : UNKNOWN_LOCATION),
1059 dst_point.get_fndecl (),
1060 dst_stack_depth));
1061 }
1062 break;
1063 }
1064 }
1065
1066 /* Prune PATH, based on the verbosity level, to the most pertinent
1067 events for a diagnostic that involves VAR ending in state STATE
1068 (for state machine SM).
1069
1070 PATH is updated in place, and the redundant checker_events are deleted.
1071
1072 As well as deleting events, call record_critical_state on events in
1073 which state critical to the pending_diagnostic is being handled; see
1074 the comment for diagnostic_manager::prune_for_sm_diagnostic. */
1075
1076 void
prune_path(checker_path * path,const state_machine * sm,tree var,state_machine::state_t state) const1077 diagnostic_manager::prune_path (checker_path *path,
1078 const state_machine *sm,
1079 tree var,
1080 state_machine::state_t state) const
1081 {
1082 LOG_FUNC (get_logger ());
1083 path->maybe_log (get_logger (), "path");
1084 prune_for_sm_diagnostic (path, sm, var, state);
1085 prune_interproc_events (path);
1086 finish_pruning (path);
1087 path->maybe_log (get_logger (), "pruned");
1088 }
1089
1090 /* A cheap test to determine if EXPR can be the expression of interest in
1091 an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
1092 We don't have always have a model when calling this, so we can't use
1093 tentative_region_model_context, so there can be false positives. */
1094
1095 static bool
can_be_expr_of_interest_p(tree expr)1096 can_be_expr_of_interest_p (tree expr)
1097 {
1098 if (!expr)
1099 return false;
1100
1101 /* Reject constants. */
1102 if (CONSTANT_CLASS_P (expr))
1103 return false;
1104
1105 /* Otherwise assume that it can be an lvalue. */
1106 return true;
1107 }
1108
/* First pass of diagnostic_manager::prune_path: apply verbosity level,
   pruning unrelated state change events.

   Iterate backwards through PATH, skipping state change events that aren't
   VAR but update the pertinent VAR when state-copying occurs.

   As well as deleting events, call record_critical_state on events in
   which state critical to the pending_diagnostic is being handled, so
   that the event's get_desc vfunc can potentially supply a more precise
   description of the event to the user.
   e.g. improving
     "calling 'foo' from 'bar'"
   to
     "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
   when the diagnostic relates to later dereferencing 'ptr'.  */

void
diagnostic_manager::prune_for_sm_diagnostic (checker_path *path,
					     const state_machine *sm,
					     tree var,
					     state_machine::state_t state) const
{
  /* Drop VAR up front if it can't be an lvalue of interest
     (e.g. a constant).  */
  update_for_unsuitable_sm_exprs (&var);

  /* Walk backwards from the diagnostic towards the start of the path,
     tracking the var and state of interest as we cross events that copy
     or remap state.  delete_event shifts later events down, so the upper
     bound is re-checked on every iteration.  */
  int idx = path->num_events () - 1;
  while (idx >= 0 && idx < (signed)path->num_events ())
    {
      checker_event *base_event = path->get_checker_event (idx);
      if (get_logger ())
	{
	  if (sm)
	    {
	      if (var)
		log ("considering event %i, with var: %qE, state: %qs",
		     idx, var, sm->get_state_name (state));
	      else
		log ("considering event %i, with global state: %qs",
		     idx, sm->get_state_name (state));
	    }
	  else
	    log ("considering event %i", idx);
	}
      /* update_for_unsuitable_sm_exprs should have nulled-out any
	 unsuitable var by now.  */
      gcc_assert (var == NULL || can_be_expr_of_interest_p (var));
      switch (base_event->m_kind)
	{
	default:
	  gcc_unreachable ();

	case EK_DEBUG:
	  if (m_verbosity < 4)
	    {
	      log ("filtering event %i: debug event", idx);
	      path->delete_event (idx);
	    }
	  break;

	case EK_CUSTOM:
	  /* Don't filter custom events.  */
	  break;

	case EK_STMT:
	  {
	    /* If this stmt is the origin of "var", update var.  */
	    if (var)
	      {
		statement_event *stmt_event = (statement_event *)base_event;
		tree new_var = get_any_origin (stmt_event->m_stmt, var,
					       stmt_event->m_dst_state);
		if (new_var)
		  {
		    log ("event %i: switching var of interest from %qE to %qE",
			 idx, var, new_var);
		    var = new_var;
		  }
	      }
	    if (m_verbosity < 4)
	      {
		log ("filtering event %i: statement event", idx);
		path->delete_event (idx);
	      }
	  }
	  break;

	case EK_FUNCTION_ENTRY:
	  if (m_verbosity < 1)
	    {
	      log ("filtering event %i: function entry", idx);
	      path->delete_event (idx);
	    }
	  break;

	case EK_STATE_CHANGE:
	  {
	    state_change_event *state_change = (state_change_event *)base_event;
	    /* Use region IDs to compare var with the state_change's m_var,
	       bulletproofing against cases where they can't have lvalues by
	       using tentative_region_model_context.  */
	    tentative_region_model_context ctxt;
	    region_id state_var_rid
	      = state_change->get_lvalue (state_change->m_var, &ctxt);
	    region_id var_rid = state_change->get_lvalue (var, &ctxt);
	    if (state_var_rid == var_rid && !ctxt.had_errors_p ())
	      {
		/* This state change affects VAR: keep the event, and
		   step the var/state of interest backwards through it.  */
		if (state_change->m_origin)
		  {
		    log ("event %i: switching var of interest from %qE to %qE",
			 idx, var, state_change->m_origin);
		    var = state_change->m_origin;
		    update_for_unsuitable_sm_exprs (&var);
		  }
		log ("event %i: switching state of interest from %qs to %qs",
		     idx, sm->get_state_name (state_change->m_to),
		     sm->get_state_name (state_change->m_from));
		state = state_change->m_from;
	      }
	    else if (m_verbosity < 4)
	      {
		/* An unrelated state change (or one where the lvalue
		   comparison failed): filter it out.  */
		if (var)
		  log ("filtering event %i:"
		       " state change to %qE unrelated to %qE",
		       idx, state_change->m_var, var);
		else
		  log ("filtering event %i: state change to %qE",
		       idx, state_change->m_var);
		if (ctxt.had_errors_p ())
		  log ("context had errors");
		path->delete_event (idx);
	      }
	  }
	  break;

	case EK_START_CFG_EDGE:
	  {
	    cfg_edge_event *event = (cfg_edge_event *)base_event;
	    const cfg_superedge& cfg_superedge
	      = event->get_cfg_superedge ();
	    const supernode *dest = event->m_sedge->m_dest;
	    /* Do we have an SSA_NAME defined via a phi node in
	       the dest CFG node?  */
	    if (var && TREE_CODE (var) == SSA_NAME)
	      if (SSA_NAME_DEF_STMT (var)->bb == dest->m_bb)
		{
		  if (gphi *phi
		      = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (var)))
		    {
		      /* Update var based on its phi node: pick the arg
			 corresponding to the in-edge we came along.  */
		      tree old_var = var;
		      var = cfg_superedge.get_phi_arg (phi);
		      log ("updating from %qE to %qE based on phi node",
			   old_var, var);
		      if (get_logger ())
			{
			  pretty_printer pp;
			  pp_gimple_stmt_1 (&pp, phi, 0, (dump_flags_t)0);
			  log ("  phi: %s", pp_formatted_text (&pp));
			}
		      /* If we've chosen a bad exploded_path, then the
			 phi arg might be a constant.  Fail gracefully for
			 this case.  */
		      update_for_unsuitable_sm_exprs (&var);
		    }
		}

	    /* TODO: is this edge significant to var?
	       See if var can be in other states in the dest, but not
	       in other states in the src?
	       Must have multiple sibling edges.  */

	    if (event->should_filter_p (m_verbosity))
	      {
		log ("filtering event %i: CFG edge", idx);
		path->delete_event (idx);
		/* Also delete the corresponding EK_END_CFG_EDGE, which
		   has shifted down into slot IDX after the deletion.  */
		gcc_assert (path->get_checker_event (idx)->m_kind
			    == EK_END_CFG_EDGE);
		path->delete_event (idx);
	      }
	  }
	  break;

	case EK_END_CFG_EDGE:
	  /* These come in pairs with EK_START_CFG_EDGE events and are
	     filtered when their start event is filtered.  */
	  break;

	case EK_CALL_EDGE:
	  {
	    /* Walking backwards across a call: remap VAR from the
	       callee's frame into the caller's.  */
	    call_event *event = (call_event *)base_event;
	    const callgraph_superedge& cg_superedge
	      = event->get_callgraph_superedge ();
	    callsite_expr expr;
	    tree caller_var
	      = cg_superedge.map_expr_from_callee_to_caller (var, &expr);
	    if (caller_var)
	      {
		log ("event %i:"
		     " switching var of interest"
		     " from %qE in callee to %qE in caller",
		     idx, var, caller_var);
		var = caller_var;
		/* If VAR was passed as a parameter, this call is where
		   the critical state crosses the frame boundary; record
		   it so get_desc can describe it precisely.  */
		if (expr.param_p ())
		  event->record_critical_state (var, state);
		update_for_unsuitable_sm_exprs (&var);
	      }
	  }
	  break;

	case EK_RETURN_EDGE:
	  // TODO: potentially update var/state based on return value,
	  // args etc
	  {
	    /* Walking backwards across a return: remap VAR from the
	       caller's frame back into the callee's.  */
	    if (var)
	      {
		return_event *event = (return_event *)base_event;
		const callgraph_superedge& cg_superedge
		  = event->get_callgraph_superedge ();
		callsite_expr expr;
		tree callee_var
		  = cg_superedge.map_expr_from_caller_to_callee (var, &expr);
		if (callee_var)
		  {
		    log ("event %i:"
			 " switching var of interest"
			 " from %qE in caller to %qE in callee",
			 idx, var, callee_var);
		    var = callee_var;
		    if (expr.return_value_p ())
		      event->record_critical_state (var, state);
		    update_for_unsuitable_sm_exprs (&var);
		  }
	      }
	  }
	  break;

	case EK_SETJMP:
	  /* TODO: only show setjmp_events that matter i.e. those for which
	     there is a later rewind event using them.  */
	case EK_REWIND_FROM_LONGJMP:
	case EK_REWIND_TO_SETJMP:
	  break;

	case EK_WARNING:
	  /* Always show the final "warning" event in the path.  */
	  break;
	}
      idx--;
    }
}
1357
1358 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
1359 If *EXPR is not suitable to be the expression of interest in
1360 an sm-diagnostic, set *EXPR to NULL and log. */
1361
1362 void
update_for_unsuitable_sm_exprs(tree * expr) const1363 diagnostic_manager::update_for_unsuitable_sm_exprs (tree *expr) const
1364 {
1365 gcc_assert (expr);
1366 if (*expr && !can_be_expr_of_interest_p (*expr))
1367 {
1368 log ("new var %qE is unsuitable; setting var to NULL", *expr);
1369 *expr = NULL_TREE;
1370 }
1371 }
1372
1373 /* Second pass of diagnostic_manager::prune_path: remove redundant
1374 interprocedural information.
1375
1376 For example, given:
1377 (1)- calling "f2" from "f1"
1378 (2)--- entry to "f2"
1379 (3)--- calling "f3" from "f2"
1380 (4)----- entry to "f3"
1381 (5)--- returning to "f2" to "f3"
1382 (6)- returning to "f1" to "f2"
1383 with no other intervening events, then none of these events are
1384 likely to be interesting to the user.
1385
1386 Prune [..., call, function-entry, return, ...] triples repeatedly
1387 until nothing has changed. For the example above, this would
1388 remove events (3, 4, 5), and then remove events (1, 2, 6). */
1389
1390 void
prune_interproc_events(checker_path * path) const1391 diagnostic_manager::prune_interproc_events (checker_path *path) const
1392 {
1393 bool changed = false;
1394 do
1395 {
1396 changed = false;
1397 int idx = path->num_events () - 1;
1398 while (idx >= 0)
1399 {
1400 /* Prune [..., call, function-entry, return, ...] triples. */
1401 if (idx + 2 < (signed)path->num_events ()
1402 && path->get_checker_event (idx)->is_call_p ()
1403 && path->get_checker_event (idx + 1)->is_function_entry_p ()
1404 && path->get_checker_event (idx + 2)->is_return_p ())
1405 {
1406 if (get_logger ())
1407 {
1408 label_text desc
1409 (path->get_checker_event (idx)->get_desc (false));
1410 log ("filtering events %i-%i:"
1411 " irrelevant call/entry/return: %s",
1412 idx, idx + 2, desc.m_buffer);
1413 desc.maybe_free ();
1414 }
1415 path->delete_event (idx + 2);
1416 path->delete_event (idx + 1);
1417 path->delete_event (idx);
1418 changed = true;
1419 idx--;
1420 continue;
1421 }
1422
1423 /* Prune [..., call, return, ...] pairs
1424 (for -fanalyzer-verbosity=0). */
1425 if (idx + 1 < (signed)path->num_events ()
1426 && path->get_checker_event (idx)->is_call_p ()
1427 && path->get_checker_event (idx + 1)->is_return_p ())
1428 {
1429 if (get_logger ())
1430 {
1431 label_text desc
1432 (path->get_checker_event (idx)->get_desc (false));
1433 log ("filtering events %i-%i:"
1434 " irrelevant call/return: %s",
1435 idx, idx + 1, desc.m_buffer);
1436 desc.maybe_free ();
1437 }
1438 path->delete_event (idx + 1);
1439 path->delete_event (idx);
1440 changed = true;
1441 idx--;
1442 continue;
1443 }
1444
1445 idx--;
1446 }
1447
1448 }
1449 while (changed);
1450 }
1451
1452 /* Final pass of diagnostic_manager::prune_path.
1453
1454 If all we're left with is in one function, then filter function entry
1455 events. */
1456
1457 void
finish_pruning(checker_path * path) const1458 diagnostic_manager::finish_pruning (checker_path *path) const
1459 {
1460 if (!path->interprocedural_p ())
1461 {
1462 int idx = path->num_events () - 1;
1463 while (idx >= 0 && idx < (signed)path->num_events ())
1464 {
1465 checker_event *base_event = path->get_checker_event (idx);
1466 if (base_event->m_kind == EK_FUNCTION_ENTRY)
1467 {
1468 log ("filtering event %i:"
1469 " function entry for purely intraprocedural path", idx);
1470 path->delete_event (idx);
1471 }
1472 idx--;
1473 }
1474 }
1475 }
1476
1477 } // namespace ana
1478
1479 #endif /* #if ENABLE_ANALYZER */
1480