/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}


/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection, rather than
   rejecting differences.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	if (iter_sval == *other_slot)
	  out->put (iter_reg, iter_sval);
    }
  return true;
}
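
/* For example (with hypothetical regions r1, r2 and svalues sv_a, sv_b,
   sv_c), merging
     this:  {r1: sv_a, r2: sv_b}
   with
     other: {r1: sv_a, r2: sv_c}
   writes
     out:   {r1: sv_a}
   since the two maps disagree about r2.  */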

/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
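
/* For example, a compact (!multiline) dump emitted by the above has
   roughly this shape (with hypothetical frame, store, and constraint
   contents, elided here as "..."):
     stack depth: 1 {frame (index 0): ...}, {...}, constraint_manager: {...}
   with "dynamic_extents:" appended only if any dynamic sizes are known.  */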

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const FINAL OVERRIDE
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
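
/* For example, the poison kinds above correspond to user code along these
   (hypothetical) lines:
     int i; return i;            // POISON_KIND_UNINIT
     free (p); return *p;        // POISON_KIND_FREED
   and, for POISON_KIND_POPPED_STACK, dereferencing a pointer to a local
   whose stack frame has already been popped.  */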

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const FINAL OVERRIDE
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const FINAL OVERRIDE
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
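
/* For example, with a 32-bit "int x", both "x << -1" (a negative count)
   and "x << 32" (a count >= the operand's precision) are flagged by the
   two diagnostics above, per the INT34-C rule cited in
   get_gassign_result below.  */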

/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				   region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn (new shift_count_negative_diagnostic
				  (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn (new shift_count_overflow_diagnostic
				  (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
				   rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
      tmp = A | B;
      if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
      if (A) goto L1; else goto L4;
  L1: if (B) goto L2; else goto L4;
  L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if ASSIGN_STMT appears to be such a short-circuitable stmt,
   such as the def-stmt for B for the:
      tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of an IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
						     src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  int get_controlling_option () const FINAL OVERRIDE
  {
    return 0;
  }

  bool emit (rich_location *richloc) FINAL OVERRIDE
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }

  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
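
/* For example, source being analyzed (e.g. in the analyzer's testsuite)
   can contain:
     __analyzer_dump_path ();
   and on_stmt_pre (below) responds by queuing a dump_path_diagnostic,
   resulting in a "path" note showing one way of reaching that point.  */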

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_terminate_path,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);

	/* Debugging/test support.  */
	if (is_special_named_call_p (call, "__analyzer_describe", 2))
	  impl_call_analyzer_describe (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
	  impl_call_analyzer_dump_capacity (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
	  impl_call_analyzer_dump_escaped (call);
	else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
	  {
	    /* Handle the builtin "__analyzer_dump_path" by queuing a
	       diagnostic at this exploded_node.  */
	    ctxt->warn (new dump_path_diagnostic ());
	  }
	else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
					  0))
	  {
	    /* Handle the builtin "__analyzer_dump_region_model" by dumping
	       the region model's state to stderr.  */
	    dump (false);
	  }
	else if (is_special_named_call_p (call, "__analyzer_eval", 1))
	  impl_call_analyzer_eval (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_break", 0))
	  {
	    /* Handle the builtin "__analyzer_break" by triggering a
	       breakpoint.  */
	    /* TODO: is there a good cross-platform way to do this?  */
	    raise (SIGINT);
	  }
	else if (is_special_named_call_p (call,
					  "__analyzer_dump_exploded_nodes",
					  1))
	  {
	    /* This is handled elsewhere.  */
	  }
	else
	  *out_unknown_side_effects = on_call_pre (call, ctxt,
						   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}

/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}

/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
	return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
						 cd.get_fndecl_for_call (),
						 inputs);
  return sval;
}
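
/* For example, given a declaration along these (hypothetical) lines:
     extern int square (int) __attribute__((const));
   two calls to "square" with the same symbolic argument can be modelled
   as yielding the same const_fn_result_svalue, rather than two unrelated
   conjured values.  */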

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).

   Write true to *OUT_TERMINATE_PATH if this execution path should be
   terminated (e.g. the function call terminates the process).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
			   bool *out_terminate_path)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  /* For the common case of functions without __attribute__((const)),
	     use a conjured value, and purge any prior state involving that
	     value (in case this is in a loop).  */
	  sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
						       lhs_region,
						       conjured_purge (this,
								       ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    {
      switch (gimple_call_internal_fn (call))
       {
       default:
	 break;
       case IFN_BUILTIN_EXPECT:
	 impl_call_builtin_expect (cd);
	 return false;
       case IFN_UBSAN_BOUNDS:
	 return false;
       }
    }

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
	  {
	  default:
	    if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	      unknown_side_effects = true;
	    break;
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	    impl_call_alloca (cd);
	    return false;
	  case BUILT_IN_CALLOC:
	    impl_call_calloc (cd);
	    return false;
	  case BUILT_IN_EXPECT:
	  case BUILT_IN_EXPECT_WITH_PROBABILITY:
	    impl_call_builtin_expect (cd);
	    return false;
	  case BUILT_IN_FREE:
	    /* Handle in "on_call_post".  */
	    break;
	  case BUILT_IN_MALLOC:
	    impl_call_malloc (cd);
	    return false;
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMCPY_CHK:
	    impl_call_memcpy (cd);
	    return false;
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    impl_call_memset (cd);
	    return false;
1440 	  case BUILT_IN_REALLOC:
1441 	    return false;
1442 	  case BUILT_IN_STRCHR:
1443 	    impl_call_strchr (cd);
1444 	    return false;
1445 	  case BUILT_IN_STRCPY:
1446 	  case BUILT_IN_STRCPY_CHK:
1447 	    impl_call_strcpy (cd);
1448 	    return false;
1449 	  case BUILT_IN_STRLEN:
1450 	    impl_call_strlen (cd);
1451 	    return false;
1452 
1453 	  case BUILT_IN_STACK_SAVE:
1454 	  case BUILT_IN_STACK_RESTORE:
1455 	    return false;
1456 
1457 	  /* Stdio builtins.  */
1458 	  case BUILT_IN_FPRINTF:
1459 	  case BUILT_IN_FPRINTF_UNLOCKED:
1460 	  case BUILT_IN_PUTC:
1461 	  case BUILT_IN_PUTC_UNLOCKED:
1462 	  case BUILT_IN_FPUTC:
1463 	  case BUILT_IN_FPUTC_UNLOCKED:
1464 	  case BUILT_IN_FPUTS:
1465 	  case BUILT_IN_FPUTS_UNLOCKED:
1466 	  case BUILT_IN_FWRITE:
1467 	  case BUILT_IN_FWRITE_UNLOCKED:
1468 	  case BUILT_IN_PRINTF:
1469 	  case BUILT_IN_PRINTF_UNLOCKED:
1470 	  case BUILT_IN_PUTCHAR:
1471 	  case BUILT_IN_PUTCHAR_UNLOCKED:
1472 	  case BUILT_IN_PUTS:
1473 	  case BUILT_IN_PUTS_UNLOCKED:
1474 	  case BUILT_IN_VFPRINTF:
1475 	  case BUILT_IN_VPRINTF:
1476 	    /* These stdio builtins have external effects that are out
1477 	       of scope for the analyzer: we only want to model the effects
1478 	       on the return value.  */
1479 	    break;
1480 	  }
1481       else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
1482 	{
1483 	  impl_call_malloc (cd);
1484 	  return false;
1485 	}
1486       else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
1487 	{
1488 	  impl_call_calloc (cd);
1489 	  return false;
1490 	}
1491       else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
1492 	{
1493 	  impl_call_alloca (cd);
1494 	  return false;
1495 	}
1496       else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1497 	{
1498 	  impl_call_realloc (cd);
1499 	  return false;
1500 	}
1501       else if (is_named_call_p (callee_fndecl, "error"))
1502 	{
1503 	  if (impl_call_error (cd, 3, out_terminate_path))
1504 	    return false;
1505 	  else
1506 	    unknown_side_effects = true;
1507 	}
1508       else if (is_named_call_p (callee_fndecl, "error_at_line"))
1509 	{
1510 	  if (impl_call_error (cd, 5, out_terminate_path))
1511 	    return false;
1512 	  else
1513 	    unknown_side_effects = true;
1514 	}
1515       else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1516 	       || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1517 	{
1518 	  impl_call_fgets (cd);
1519 	  return false;
1520 	}
1521       else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1522 	{
1523 	  impl_call_fread (cd);
1524 	  return false;
1525 	}
1526       else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1527 	{
1528 	  /* No side-effects (tracking stream state is out-of-scope
1529 	     for the analyzer).  */
1530 	}
1531       else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1532 	       && POINTER_TYPE_P (cd.get_arg_type (0)))
1533 	{
1534 	  impl_call_memset (cd);
1535 	  return false;
1536 	}
1537       else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
1538 	       && POINTER_TYPE_P (cd.get_arg_type (0)))
1539 	{
1540 	  impl_call_strchr (cd);
1541 	  return false;
1542 	}
1543       else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1544 	       && POINTER_TYPE_P (cd.get_arg_type (0)))
1545 	{
1546 	  impl_call_strlen (cd);
1547 	  return false;
1548 	}
1549       else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1550 	{
1551 	  impl_call_operator_new (cd);
1552 	  return false;
1553 	}
1554       else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1555 	{
1556 	  impl_call_operator_new (cd);
1557 	  return false;
1558 	}
1559       else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1560 	       || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1561 	       || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1562 	{
1563 	  /* Handle in "on_call_post".  */
1564 	}
1565       else if (!fndecl_has_gimple_body_p (callee_fndecl)
1566 	       && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
1567 	       && !fndecl_built_in_p (callee_fndecl))
1568 	unknown_side_effects = true;
1569     }
1570   else
1571     unknown_side_effects = true;
1572 
1573   return unknown_side_effects;
1574 }
1575 
1576 /* Update this model for the CALL stmt, using CTXT to report any
1577    diagnostics - the second half.
1578 
1579    Updates to the region_model that should be made *after* sm-states
1580    are updated are done here; other updates to the region_model are done
1581    in region_model::on_call_pre.
1582 
1583    If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1584    to purge state.  */
1585 
1586 void
on_call_post(const gcall * call,bool unknown_side_effects,region_model_context * ctxt)1587 region_model::on_call_post (const gcall *call,
1588 			    bool unknown_side_effects,
1589 			    region_model_context *ctxt)
1590 {
1591   if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1592     {
1593       call_details cd (call, this, ctxt);
1594       if (is_named_call_p (callee_fndecl, "free", call, 1))
1595 	{
1596 	  impl_call_free (cd);
1597 	  return;
1598 	}
1599       if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1600 	  || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1601 	  || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1602 	{
1603 	  impl_call_operator_delete (cd);
1604 	  return;
1605 	}
1606       /* Was this fndecl referenced by
1607 	 __attribute__((malloc(FOO)))?  */
1608       if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1609 	{
1610 	  impl_deallocation_call (cd);
1611 	  return;
1612 	}
1613       if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1614 	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1615 	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1616 	  {
1617 	  default:
1618 	    break;
1619 	  case BUILT_IN_REALLOC:
1620 	    impl_call_realloc (cd);
1621 	    return;
1622 	  }
1623     }
1624 
1625   if (unknown_side_effects)
1626     handle_unrecognized_call (call, ctxt);
1627 }
1628 
1629 /* Purge state involving SVAL from this region_model, using CTXT
1630    (if non-NULL) to purge other state in a program_state.
1631 
1632    For example, if we're at the def-stmt of an SSA name, then we need to
1633    purge any state for svalues that involve that SSA name.  This avoids
1634    false positives in loops, since a symbolic value referring to the
1635    SSA name will be referring to the previous value of that SSA name.
1636 
1637    For example, in:
1638      while ((e = hashmap_iter_next(&iter))) {
1639        struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1640        free (e_strbuf->value);
1641      }
1642    at the def-stmt of e_8:
1643      e_8 = hashmap_iter_next (&iter);
1644    we should purge the "freed" state of:
1645      INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1646    which is the "e_strbuf->value" value from the previous iteration,
1647    or we will erroneously report a double-free - the "e_8" within it
1648    refers to the previous value.  */
1649 
1650 void
purge_state_involving(const svalue * sval,region_model_context * ctxt)1651 region_model::purge_state_involving (const svalue *sval,
1652 				     region_model_context *ctxt)
1653 {
1654   if (!sval->can_have_associated_state_p ())
1655     return;
1656   m_store.purge_state_involving (sval, m_mgr);
1657   m_constraints->purge_state_involving (sval);
1658   m_dynamic_extents.purge_state_involving (sval);
1659   if (ctxt)
1660     ctxt->purge_state_involving (sval);
1661 }
1662 
1663 /* A pending_note subclass for adding a note about an
1664    __attribute__((access, ...)) to a diagnostic.  */
1665 
1666 class reason_attr_access : public pending_note_subclass<reason_attr_access>
1667 {
1668 public:
reason_attr_access(tree callee_fndecl,const attr_access & access)1669   reason_attr_access (tree callee_fndecl, const attr_access &access)
1670   : m_callee_fndecl (callee_fndecl),
1671     m_ptr_argno (access.ptrarg),
1672     m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
1673   {
1674   }
1675 
1676   const char *get_kind () const FINAL OVERRIDE { return "reason_attr_access"; }
1677 
1678   void emit () const
1679   {
1680     inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
1681 	    "parameter %i of %qD marked with attribute %qs",
1682 	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
1683   }
1684 
1685   bool operator== (const reason_attr_access &other) const
1686   {
1687     return (m_callee_fndecl == other.m_callee_fndecl
1688 	    && m_ptr_argno == other.m_ptr_argno
1689 	    && !strcmp (m_access_str, other.m_access_str));
1690   }
1691 
1692 private:
1693   tree m_callee_fndecl;
1694   unsigned m_ptr_argno;
1695   const char *m_access_str;
1696 };
1697 
1698 /* Check CALL, a call to external function CALLEE_FNDECL, based on
1699    any __attribute__ ((access, ...)) on the latter, complaining to
1700    CTXT about any issues.
1701 
1702    Currently we merely call check_region_for_write on any regions
1703    pointed to by arguments marked with a "write_only" or "read_write"
1704    attribute.  */
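/* For example (an illustrative sketch), given a declaration such as:

     void fill_buffer (char *dst, size_t len)
       __attribute__ ((access (write_only, 1, 2)));

   a call to "fill_buffer" leads to check_region_for_write below being
   called on the region pointed to by its first argument.  */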
1705 
1706 void
1707 region_model::
1708 check_external_function_for_access_attr (const gcall *call,
1709 					 tree callee_fndecl,
1710 					 region_model_context *ctxt) const
1711 {
1712   gcc_assert (call);
1713   gcc_assert (callee_fndecl);
1714   gcc_assert (ctxt);
1715 
1716   tree fntype = TREE_TYPE (callee_fndecl);
1717   if (!fntype)
1718     return;
1719 
1720   if (!TYPE_ATTRIBUTES (fntype))
1721     return;
1722 
1723   /* Initialize a map of attribute access specifications for arguments
1724      to the function call.  */
1725   rdwr_map rdwr_idx;
1726   init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
1727 
1728   unsigned argno = 0;
1729 
1730   for (tree iter = TYPE_ARG_TYPES (fntype); iter;
1731        iter = TREE_CHAIN (iter), ++argno)
1732     {
1733       const attr_access* access = rdwr_idx.get (argno);
1734       if (!access)
1735 	continue;
1736 
1737       /* Ignore any duplicate entry in the map for the size argument.  */
1738       if (access->ptrarg != argno)
1739 	continue;
1740 
1741       if (access->mode == access_write_only
1742 	  || access->mode == access_read_write)
1743 	{
1744 	  /* Subclass of decorated_region_model_context that
1745 	     adds a note about the attr access to any saved diagnostics.  */
1746 	  class annotating_ctxt : public note_adding_context
1747 	  {
1748 	  public:
1749 	    annotating_ctxt (tree callee_fndecl,
1750 			     const attr_access &access,
1751 			     region_model_context *ctxt)
1752 	    : note_adding_context (ctxt),
1753 	      m_callee_fndecl (callee_fndecl),
1754 	      m_access (access)
1755 	    {
1756 	    }
1757 	    pending_note *make_note () FINAL OVERRIDE
1758 	    {
1759 	      return new reason_attr_access (m_callee_fndecl, m_access);
1760 	    }
1761 	  private:
1762 	    tree m_callee_fndecl;
1763 	    const attr_access &m_access;
1764 	  };
1765 
1766 	  /* Use this ctxt below so that any diagnostics get the
1767 	     note added to them.  */
1768 	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
1769 
1770 	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
1771 	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
1772 	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
1773 	  check_region_for_write (reg, &my_ctxt);
1774 	  /* We don't use the size arg for now.  */
1775 	}
1776     }
1777 }
1778 
1779 /* Handle a call CALL to a function with unknown behavior.
1780 
1781    Traverse the regions in this model, determining what regions are
1782    reachable from pointer arguments to CALL and from global variables,
1783    recursively.
1784 
1785    Set all reachable regions to new unknown values and purge sm-state
1786    from their values, and from values that point to them.  */
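/* For example (a sketch), in:

     int g;
     void test (int *p)
     {
       unknown_fn (p);
     }

   both the global "g" and "*p" are reachable by "unknown_fn", so both
   are conservatively given new unknown/conjured values below.  */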
1787 
1788 void
1789 region_model::handle_unrecognized_call (const gcall *call,
1790 					region_model_context *ctxt)
1791 {
1792   tree fndecl = get_fndecl_for_call (call, ctxt);
1793 
1794   if (fndecl && ctxt)
1795     check_external_function_for_access_attr (call, fndecl, ctxt);
1796 
1797   reachable_regions reachable_regs (this);
1798 
1799   /* Determine the reachable regions and their mutability.  */
1800   {
1801     /* Add globals and regions that already escaped in previous
1802        unknown calls.  */
1803     m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1804 			      &reachable_regs);
1805 
1806     /* Params that are pointers.  */
1807     tree iter_param_types = NULL_TREE;
1808     if (fndecl)
1809       iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1810     for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1811       {
1812 	/* Track expected param type, where available.  */
1813 	tree param_type = NULL_TREE;
1814 	if (iter_param_types)
1815 	  {
1816 	    param_type = TREE_VALUE (iter_param_types);
1817 	    gcc_assert (param_type);
1818 	    iter_param_types = TREE_CHAIN (iter_param_types);
1819 	  }
1820 
1821 	tree parm = gimple_call_arg (call, arg_idx);
1822 	const svalue *parm_sval = get_rvalue (parm, ctxt);
1823 	reachable_regs.handle_parm (parm_sval, param_type);
1824       }
1825   }
1826 
1827   uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
1828 
1829   /* Purge sm-state for the svalues that were reachable,
1830      both in non-mutable and mutable form.  */
1831   for (svalue_set::iterator iter
1832 	 = reachable_regs.begin_reachable_svals ();
1833        iter != reachable_regs.end_reachable_svals (); ++iter)
1834     {
1835       const svalue *sval = (*iter);
1836       if (ctxt)
1837 	ctxt->on_unknown_change (sval, false);
1838     }
1839   for (svalue_set::iterator iter
1840 	 = reachable_regs.begin_mutable_svals ();
1841        iter != reachable_regs.end_mutable_svals (); ++iter)
1842     {
1843       const svalue *sval = (*iter);
1844       if (ctxt)
1845 	ctxt->on_unknown_change (sval, true);
1846       if (uncertainty)
1847 	uncertainty->on_mutable_sval_at_unknown_call (sval);
1848     }
1849 
1850   /* Mark any clusters that have escaped.  */
1851   reachable_regs.mark_escaped_clusters (ctxt);
1852 
1853   /* Update bindings for all clusters that have escaped, whether above,
1854      or previously.  */
1855   m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
1856 			     conjured_purge (this, ctxt));
1857 
1858   /* Purge dynamic extents from any regions that have escaped mutably:
1859      realloc could have been called on them.  */
1860   for (hash_set<const region *>::iterator
1861 	 iter = reachable_regs.begin_mutable_base_regs ();
1862        iter != reachable_regs.end_mutable_base_regs ();
1863        ++iter)
1864     {
1865       const region *base_reg = (*iter);
1866       unset_dynamic_extents (base_reg);
1867     }
1868 }
1869 
1870 /* Traverse the regions in this model, determining what regions are
1871    reachable from the store and populating *OUT.
1872 
1873    If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1874    for reachability (for handling return values from functions when
1875    analyzing return of the only function on the stack).
1876 
1877    If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1878    within it as being maybe-bound as additional "roots" for reachability.
1879 
1880    Find svalues that haven't leaked.  */
1881 
1882 void
1883 region_model::get_reachable_svalues (svalue_set *out,
1884 				     const svalue *extra_sval,
1885 				     const uncertainty_t *uncertainty)
1886 {
1887   reachable_regions reachable_regs (this);
1888 
1889   /* Add globals and regions that already escaped in previous
1890      unknown calls.  */
1891   m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1892 			    &reachable_regs);
1893 
1894   if (extra_sval)
1895     reachable_regs.handle_sval (extra_sval);
1896 
1897   if (uncertainty)
1898     for (uncertainty_t::iterator iter
1899 	   = uncertainty->begin_maybe_bound_svals ();
1900 	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1901       reachable_regs.handle_sval (*iter);
1902 
1903   /* Get regions for locals that have explicitly bound values.  */
1904   for (store::cluster_map_t::iterator iter = m_store.begin ();
1905        iter != m_store.end (); ++iter)
1906     {
1907       const region *base_reg = (*iter).first;
1908       if (const region *parent = base_reg->get_parent_region ())
1909 	if (parent->get_kind () == RK_FRAME)
1910 	  reachable_regs.add (base_reg, false);
1911     }
1912 
1913   /* Populate *OUT based on the values that were reachable.  */
1914   for (svalue_set::iterator iter
1915 	 = reachable_regs.begin_reachable_svals ();
1916        iter != reachable_regs.end_reachable_svals (); ++iter)
1917     out->add (*iter);
1918 }
1919 
1920 /* Update this model for the RETURN_STMT, using CTXT to report any
1921    diagnostics.  */
1922 
1923 void
1924 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1925 {
1926   tree callee = get_current_function ()->decl;
1927   tree lhs = DECL_RESULT (callee);
1928   tree rhs = gimple_return_retval (return_stmt);
1929 
1930   if (lhs && rhs)
1931     {
1932       const svalue *sval = get_rvalue (rhs, ctxt);
1933       const region *ret_reg = get_lvalue (lhs, ctxt);
1934       set_value (ret_reg, sval, ctxt);
1935     }
1936 }
1937 
1938 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1939    ENODE, using CTXT to report any diagnostics.
1940 
1941    This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1942    0), as opposed to any second return due to longjmp/siglongjmp.  */
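/* For example (a sketch), for:

     jmp_buf env;
     int i = setjmp (env);

   a setjmp_svalue recording CALL and ENODE is bound into the region
   for "env", and "i" is set to 0.  */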
1943 
1944 void
1945 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1946 			 region_model_context *ctxt)
1947 {
1948   const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1949   const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1950 					 ctxt);
1951 
1952   /* Create a setjmp_svalue for this call and store it in BUF_REG's
1953      region.  */
1954   if (buf_reg)
1955     {
1956       setjmp_record r (enode, call);
1957       const svalue *sval
1958 	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1959       set_value (buf_reg, sval, ctxt);
1960     }
1961 
1962   /* Direct calls to setjmp return 0.  */
1963   if (tree lhs = gimple_call_lhs (call))
1964     {
1965       const svalue *new_sval
1966 	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
1967       const region *lhs_reg = get_lvalue (lhs, ctxt);
1968       set_value (lhs_reg, new_sval, ctxt);
1969     }
1970 }
1971 
1972 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1973    to a "setjmp" at SETJMP_CALL where the final stack depth should be
1974    SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
1975    done, and should be done by the caller.  */
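/* For example (a sketch), rewinding via:

     longjmp (env, 0);

   leads below to the LHS of the corresponding "setjmp" being set to 1,
   since passing 0 as the value to longjmp makes setjmp return 1;
   for other values we instead add a constraint that the result is
   nonzero.  */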
1976 
1977 void
1978 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1979 			   int setjmp_stack_depth, region_model_context *ctxt)
1980 {
1981   /* Evaluate the val, using the frame of the "longjmp".  */
1982   tree fake_retval = gimple_call_arg (longjmp_call, 1);
1983   const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1984 
1985   /* Pop any frames until we reach the stack depth of the function where
1986      setjmp was called.  */
1987   gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1988   while (get_stack_depth () > setjmp_stack_depth)
1989     pop_frame (NULL, NULL, ctxt, false);
1990 
1991   gcc_assert (get_stack_depth () == setjmp_stack_depth);
1992 
1993   /* Assign to LHS of "setjmp" in new_state.  */
1994   if (tree lhs = gimple_call_lhs (setjmp_call))
1995     {
1996       /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
1997       const svalue *zero_sval
1998 	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1999       tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
2000       /* If we have 0, use 1.  */
2001       if (eq_zero.is_true ())
2002 	{
2003 	  const svalue *one_sval
2004 	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
2005 	  fake_retval_sval = one_sval;
2006 	}
2007       else
2008 	{
2009 	  /* Otherwise note that the value is nonzero.  */
2010 	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
2011 	}
2012 
2013       /* Decorate the return value from setjmp as being unmergeable,
2014 	 so that we don't attempt to merge states with it as zero
2015 	 with states in which it's nonzero, leading to a clean distinction
2016 	 in the exploded_graph between the first return and the second
2017 	 return.  */
2018       fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
2019 
2020       const region *lhs_reg = get_lvalue (lhs, ctxt);
2021       set_value (lhs_reg, fake_retval_sval, ctxt);
2022     }
2023 }
2024 
2025 /* Update this region_model for a phi stmt of the form
2026      LHS = PHI <...RHS...>.
2027    where RHS is for the appropriate edge.
2028    Get state from OLD_STATE so that all of the phi stmts for a basic block
2029    are effectively handled simultaneously.  */
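/* For example (a sketch), given gimple such as:

     x_5 = PHI <x_3(2), x_4(3)>

   when following the CFG edge from basic block 2, LHS is x_5 and RHS
   is x_3, with the rvalue of x_3 taken from OLD_STATE.  */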
2030 
2031 void
2032 region_model::handle_phi (const gphi *phi,
2033 			  tree lhs, tree rhs,
2034 			  const region_model &old_state,
2035 			  region_model_context *ctxt)
2036 {
2037   /* For now, don't bother tracking the .MEM SSA names.  */
2038   if (tree var = SSA_NAME_VAR (lhs))
2039     if (TREE_CODE (var) == VAR_DECL)
2040       if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2041 	return;
2042 
2043   const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2044   const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2045 
2046   set_value (dst_reg, src_sval, ctxt);
2047 
2048   if (ctxt)
2049     ctxt->on_phi (phi, rhs);
2050 }
2051 
2052 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2053 
2054    Get the id of the region for PV within this region_model,
2055    emitting any diagnostics to CTXT.  */
2056 
2057 const region *
2058 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2059 {
2060   tree expr = pv.m_tree;
2061 
2062   gcc_assert (expr);
2063 
2064   switch (TREE_CODE (expr))
2065     {
2066     default:
2067       return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2068 							 dump_location_t ());
2069 
2070     case ARRAY_REF:
2071       {
2072 	tree array = TREE_OPERAND (expr, 0);
2073 	tree index = TREE_OPERAND (expr, 1);
2074 
2075 	const region *array_reg = get_lvalue (array, ctxt);
2076 	const svalue *index_sval = get_rvalue (index, ctxt);
2077 	return m_mgr->get_element_region (array_reg,
2078 					  TREE_TYPE (TREE_TYPE (array)),
2079 					  index_sval);
2080       }
2081       break;
2082 
2083     case BIT_FIELD_REF:
2084       {
2085 	tree inner_expr = TREE_OPERAND (expr, 0);
2086 	const region *inner_reg = get_lvalue (inner_expr, ctxt);
2087 	tree num_bits = TREE_OPERAND (expr, 1);
2088 	tree first_bit_offset = TREE_OPERAND (expr, 2);
2089 	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2090 	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2091 	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2092 			TREE_INT_CST_LOW (num_bits));
2093 	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2094       }
2095       break;
2096 
2097     case MEM_REF:
2098       {
2099 	tree ptr = TREE_OPERAND (expr, 0);
2100 	tree offset = TREE_OPERAND (expr, 1);
2101 	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2102 	const svalue *offset_sval = get_rvalue (offset, ctxt);
2103 	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2104 	return m_mgr->get_offset_region (star_ptr,
2105 					 TREE_TYPE (expr),
2106 					 offset_sval);
2107       }
2108       break;
2109 
2110     case FUNCTION_DECL:
2111       return m_mgr->get_region_for_fndecl (expr);
2112 
2113     case LABEL_DECL:
2114       return m_mgr->get_region_for_label (expr);
2115 
2116     case VAR_DECL:
2117       /* Handle globals.  */
2118       if (is_global_var (expr))
2119 	return m_mgr->get_region_for_global (expr);
2120 
2121       /* Fall through.  */
2122 
2123     case SSA_NAME:
2124     case PARM_DECL:
2125     case RESULT_DECL:
2126       {
2127 	gcc_assert (TREE_CODE (expr) == SSA_NAME
2128 		    || TREE_CODE (expr) == PARM_DECL
2129 		    || TREE_CODE (expr) == VAR_DECL
2130 		    || TREE_CODE (expr) == RESULT_DECL);
2131 
2132 	int stack_index = pv.m_stack_depth;
2133 	const frame_region *frame = get_frame_at_index (stack_index);
2134 	gcc_assert (frame);
2135 	return frame->get_region_for_local (m_mgr, expr, ctxt);
2136       }
2137 
2138     case COMPONENT_REF:
2139       {
2140 	/* obj.field  */
2141 	tree obj = TREE_OPERAND (expr, 0);
2142 	tree field = TREE_OPERAND (expr, 1);
2143 	const region *obj_reg = get_lvalue (obj, ctxt);
2144 	return m_mgr->get_field_region (obj_reg, field);
2145       }
2146       break;
2147 
2148     case STRING_CST:
2149       return m_mgr->get_region_for_string (expr);
2150     }
2151 }
2152 
2153 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.  */
2154 
2155 static void
2156 assert_compat_types (tree src_type, tree dst_type)
2157 {
2158   if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2159     {
2160 #if CHECKING_P
2161       if (!(useless_type_conversion_p (src_type, dst_type)))
2162 	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2163 #endif
2164     }
2165 }
2166 
2167 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op.  */
2168 
2169 bool
2170 compat_types_p (tree src_type, tree dst_type)
2171 {
2172   if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2173     if (!(useless_type_conversion_p (src_type, dst_type)))
2174       return false;
2175   return true;
2176 }
2177 
2178 /* Get the region for PV within this region_model,
2179    emitting any diagnostics to CTXT.  */
2180 
2181 const region *
2182 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2183 {
2184   if (pv.m_tree == NULL_TREE)
2185     return NULL;
2186 
2187   const region *result_reg = get_lvalue_1 (pv, ctxt);
2188   assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2189   return result_reg;
2190 }
2191 
2192 /* Get the region for EXPR within this region_model (assuming the most
2193    recent stack frame if it's a local).  */
2194 
2195 const region *
2196 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2197 {
2198   return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2199 }
2200 
2201 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2202 
2203    Get the value of PV within this region_model,
2204    emitting any diagnostics to CTXT.  */
2205 
2206 const svalue *
2207 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2208 {
2209   gcc_assert (pv.m_tree);
2210 
2211   switch (TREE_CODE (pv.m_tree))
2212     {
2213     default:
2214       return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2215 
2216     case ADDR_EXPR:
2217       {
2218 	/* "&EXPR".  */
2219 	tree expr = pv.m_tree;
2220 	tree op0 = TREE_OPERAND (expr, 0);
2221 	const region *expr_reg = get_lvalue (op0, ctxt);
2222 	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
2223       }
2224       break;
2225 
2226     case BIT_FIELD_REF:
2227       {
2228 	tree expr = pv.m_tree;
2229 	tree op0 = TREE_OPERAND (expr, 0);
2230 	const region *reg = get_lvalue (op0, ctxt);
2231 	tree num_bits = TREE_OPERAND (expr, 1);
2232 	tree first_bit_offset = TREE_OPERAND (expr, 2);
2233 	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2234 	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2235 	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2236 			TREE_INT_CST_LOW (num_bits));
2237 	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
2238       }
2239 
2240     case VAR_DECL:
2241       if (DECL_HARD_REGISTER (pv.m_tree))
2242 	{
2243 	  /* If it has a hard register, it doesn't have a memory region
2244 	     and can't be referred to as an lvalue.  */
2245 	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2246 	}
2247       /* Fall through. */
2248     case PARM_DECL:
2249     case SSA_NAME:
2250     case RESULT_DECL:
2251     case ARRAY_REF:
2252       {
2253 	const region *reg = get_lvalue (pv, ctxt);
2254 	return get_store_value (reg, ctxt);
2255       }
2256 
2257     case REALPART_EXPR:
2258     case IMAGPART_EXPR:
2259     case VIEW_CONVERT_EXPR:
2260       {
2261 	tree expr = pv.m_tree;
2262 	tree arg = TREE_OPERAND (expr, 0);
2263 	const svalue *arg_sval = get_rvalue (arg, ctxt);
2264 	const svalue *sval_unaryop
2265 	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2266 					  arg_sval);
2267 	return sval_unaryop;
2268       }
2269 
2270     case INTEGER_CST:
2271     case REAL_CST:
2272     case COMPLEX_CST:
2273     case VECTOR_CST:
2274     case STRING_CST:
2275       return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2276 
2277     case POINTER_PLUS_EXPR:
2278 	{
2279 	  tree expr = pv.m_tree;
2280 	  tree ptr = TREE_OPERAND (expr, 0);
2281 	  tree offset = TREE_OPERAND (expr, 1);
2282 	  const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2283 	  const svalue *offset_sval = get_rvalue (offset, ctxt);
2284 	  const svalue *sval_binop
2285 	    = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2286 					  ptr_sval, offset_sval);
2287 	  return sval_binop;
2288 	}
2289 
2290     /* Binary ops.  */
2291     case PLUS_EXPR:
2292     case MULT_EXPR:
2293 	{
2294 	  tree expr = pv.m_tree;
2295 	  tree arg0 = TREE_OPERAND (expr, 0);
2296 	  tree arg1 = TREE_OPERAND (expr, 1);
2297 	  const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2298 	  const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2299 	  const svalue *sval_binop
2300 	    = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2301 					  arg0_sval, arg1_sval);
2302 	  return sval_binop;
2303 	}
2304 
2305     case COMPONENT_REF:
2306     case MEM_REF:
2307       {
2308 	const region *ref_reg = get_lvalue (pv, ctxt);
2309 	return get_store_value (ref_reg, ctxt);
2310       }
2311     case OBJ_TYPE_REF:
2312       {
2313         tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2314         return get_rvalue (expr, ctxt);
2315       }
2316     }
2317 }
2318 
2319 /* Get the value of PV within this region_model,
2320    emitting any diagnostics to CTXT.  */
2321 
2322 const svalue *
2323 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2324 {
2325   if (pv.m_tree == NULL_TREE)
2326     return NULL;
2327 
2328   const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2329 
2330   assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2331 
2332   result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
2333 
2334   return result_sval;
2335 }
2336 
2337 /* Get the value of EXPR within this region_model (assuming the most
2338    recent stack frame if it's a local).  */
2339 
2340 const svalue *
2341 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2342 {
2343   return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2344 }
2345 
2346 /* Return true if this model is on a path with "main" as the entrypoint
2347    (as opposed to one in which we're merely analyzing a subset of the
2348    path through the code).  */
2349 
2350 bool
2351 region_model::called_from_main_p () const
2352 {
2353   if (!m_current_frame)
2354     return false;
2355   /* Determine if the oldest stack frame in this model is for "main".  */
2356   const frame_region *frame0 = get_frame_at_index (0);
2357   gcc_assert (frame0);
2358   return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2359 }
2360 
2361 /* Subroutine of region_model::get_store_value for when REG is (or is within)
2362    a global variable that hasn't been touched since the start of this path
2363    (or was implicitly touched due to a call to an unknown function).  */
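/* For example (a sketch), given:

     static int g = 42;

   on a path from "main" on which nothing has yet touched "g", a read
   of "g" can use the constant 42 from its initializer, rather than
   INIT_VAL(g).  */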
2364 
2365 const svalue *
2366 region_model::get_initial_value_for_global (const region *reg) const
2367 {
2368   /* Get the decl that REG is for (or is within).  */
2369   const decl_region *base_reg
2370     = reg->get_base_region ()->dyn_cast_decl_region ();
2371   gcc_assert (base_reg);
2372   tree decl = base_reg->get_decl ();
2373 
2374   /* Special-case: to avoid having to explicitly update all previously
2375      untracked globals when calling an unknown fn, they implicitly have
2376      an unknown value if an unknown call has occurred, unless this is
2377      static to-this-TU and hasn't escaped.  Globals that have escaped
2378      are explicitly tracked, so we shouldn't hit this case for them.  */
2379   if (m_store.called_unknown_fn_p ()
2380       && TREE_PUBLIC (decl)
2381       && !TREE_READONLY (decl))
2382     return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2383 
2384   /* If we are on a path from the entrypoint of "main" and we have a
2385      global decl defined in this TU that hasn't been touched yet, then
2386      the initial value of REG can be taken from the initialization value
2387      of the decl.  */
2388   if (called_from_main_p () || TREE_READONLY (decl))
2389     {
2390       /* Attempt to get the initializer value for base_reg.  */
2391       if (const svalue *base_reg_init
2392 	    = base_reg->get_svalue_for_initializer (m_mgr))
2393 	{
2394 	  if (reg == base_reg)
2395 	    return base_reg_init;
2396 	  else
2397 	    {
2398 	      /* Get the value for REG within base_reg_init.  */
2399 	      binding_cluster c (base_reg);
2400 	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2401 	      const svalue *sval
2402 		= c.get_any_binding (m_mgr->get_store_manager (), reg);
2403 	      if (sval)
2404 		{
2405 		  if (reg->get_type ())
2406 		    sval = m_mgr->get_or_create_cast (reg->get_type (),
2407 						      sval);
2408 		  return sval;
2409 		}
2410 	    }
2411 	}
2412     }
2413 
2414   /* Otherwise, return INIT_VAL(REG).  */
2415   return m_mgr->get_or_create_initial_value (reg);
2416 }
2417 
2418 /* Get a value for REG, looking it up in the store, or otherwise falling
2419    back to "initial" or "unknown" values.
2420    Use CTXT to report any warnings associated with reading from REG. */
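/* For example (a sketch), a read of "foo"[1] can hit the STRING_CST
   special case below, yielding the character constant 'o' rather than
   a symbolic initial value.  */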
2421 
2422 const svalue *
2423 region_model::get_store_value (const region *reg,
2424 			       region_model_context *ctxt) const
2425 {
2426   check_region_for_read (reg, ctxt);
2427 
2428   /* Special-case: handle var_decls in the constant pool.  */
2429   if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2430     if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2431       return sval;
2432 
2433   const svalue *sval
2434     = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2435   if (sval)
2436     {
2437       if (reg->get_type ())
2438 	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2439       return sval;
2440     }
2441 
2442   /* Special-case: read at a constant index within a STRING_CST.  */
2443   if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2444     if (tree byte_offset_cst
2445 	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
2446       if (const string_region *str_reg
2447 	  = reg->get_parent_region ()->dyn_cast_string_region ())
2448 	{
2449 	  tree string_cst = str_reg->get_string_cst ();
2450 	  if (const svalue *char_sval
2451 		= m_mgr->maybe_get_char_from_string_cst (string_cst,
2452 							 byte_offset_cst))
2453 	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2454 	}
2455 
2456   /* Special-case: read the initial char of a STRING_CST.  */
2457   if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2458     if (const string_region *str_reg
2459 	= cast_reg->get_original_region ()->dyn_cast_string_region ())
2460       {
2461 	tree string_cst = str_reg->get_string_cst ();
2462 	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2463 	if (const svalue *char_sval
2464 	    = m_mgr->maybe_get_char_from_string_cst (string_cst,
2465 						     byte_offset_cst))
2466 	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2467       }
2468 
2469   /* Otherwise we implicitly have the initial value of the region
2470      (if the cluster had been touched, binding_cluster::get_any_binding
2471      would have returned UNKNOWN, and we would already have returned
2472      that above).  */
2473 
2474   /* Handle globals.  */
2475   if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2476       == RK_GLOBALS)
2477     return get_initial_value_for_global (reg);
2478 
2479   return m_mgr->get_or_create_initial_value (reg);
2480 }
2481 
2482 /* Return false if REG does not exist, true if it may do.
2483    This is for detecting regions within the stack that don't exist anymore
2484    after frames are popped.  */
2485 
2486 bool
2487 region_model::region_exists_p (const region *reg) const
2488 {
2489   /* If within a stack frame, check that the stack frame is live.  */
2490   if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2491     {
2492       /* Check that the current frame is the enclosing frame, or is called
2493 	 by it.  */
2494       for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2495 	   iter_frame = iter_frame->get_calling_frame ())
2496 	if (iter_frame == enclosing_frame)
2497 	  return true;
2498       return false;
2499     }
2500 
2501   return true;
2502 }
2503 
2504 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2505    potentially generating warnings via CTXT.
2506    PTR_SVAL must be of pointer type.
2507    PTR_TREE if non-NULL can be used when emitting diagnostics.  */
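/* For example (a sketch), in:

     *p = 1;
     if (p == NULL)
       ...

   the dereference adds the constraint "p != NULL" below, allowing the
   analyzer to reject the "p == NULL" branch as infeasible.  */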
2508 
2509 const region *
2510 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2511 			    region_model_context *ctxt) const
2512 {
2513   gcc_assert (ptr_sval);
2514   gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2515 
2516   /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2517      as a constraint.  This suppresses false positives from
2518      -Wanalyzer-null-dereference for the case where we later have an
2519      if (PTR_SVAL) that would occur if we considered the false branch
2520      and transitioned the malloc state machine from start->null.  */
2521   tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2522   const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2523   m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2524 
2525   switch (ptr_sval->get_kind ())
2526     {
2527     default:
2528       break;
2529 
2530     case SK_REGION:
2531       {
2532 	const region_svalue *region_sval
2533 	  = as_a <const region_svalue *> (ptr_sval);
2534 	return region_sval->get_pointee ();
2535       }
2536 
2537     case SK_BINOP:
2538       {
2539 	const binop_svalue *binop_sval
2540 	  = as_a <const binop_svalue *> (ptr_sval);
2541 	switch (binop_sval->get_op ())
2542 	  {
2543 	  case POINTER_PLUS_EXPR:
2544 	    {
2545 	      /* If we have a symbolic value expressing pointer arithmetic,
2546 		 try to convert it to a suitable region.  */
2547 	      const region *parent_region
2548 		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2549 	      const svalue *offset = binop_sval->get_arg1 ();
2550 	      tree type = TREE_TYPE (ptr_sval->get_type ());
2551 	      return m_mgr->get_offset_region (parent_region, type, offset);
2552 	    }
2553 	  default:
2554 	    break;
2555 	  }
2556       }
2557       break;
2558 
2559     case SK_POISONED:
2560       {
2561 	if (ctxt)
2562 	  {
2563 	    tree ptr = get_representative_tree (ptr_sval);
2564 	    /* If we can't get a representative tree for PTR_SVAL
2565 	       (e.g. if it hasn't been bound into the store), then
2566 	       fall back on PTR_TREE, if non-NULL.  */
2567 	    if (!ptr)
2568 	      ptr = ptr_tree;
2569 	    if (ptr)
2570 	      {
2571 		const poisoned_svalue *poisoned_sval
2572 		  = as_a <const poisoned_svalue *> (ptr_sval);
2573 		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2574 		ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
2575 	      }
2576 	  }
2577       }
2578       break;
2579     }
2580 
2581   return m_mgr->get_symbolic_region (ptr_sval);
2582 }
2583 
2584 /* Attempt to get BITS within any value of REG, as TYPE.
2585    In particular, extract values from compound_svalues for the case
2586    where there's a concrete binding at BITS.
2587    Return an unknown svalue if we can't handle the given case.
2588    Use CTXT to report any warnings associated with reading from REG.  */
2589 
2590 const svalue *
2591 region_model::get_rvalue_for_bits (tree type,
2592 				   const region *reg,
2593 				   const bit_range &bits,
2594 				   region_model_context *ctxt) const
2595 {
2596   const svalue *sval = get_store_value (reg, ctxt);
2597   return m_mgr->get_or_create_bits_within (type, bits, sval);
2598 }
2599 
2600 /* A subclass of pending_diagnostic for complaining about writes to
2601    constant regions of memory.  */
2602 
2603 class write_to_const_diagnostic
2604 : public pending_diagnostic_subclass<write_to_const_diagnostic>
2605 {
2606 public:
2607   write_to_const_diagnostic (const region *reg, tree decl)
2608   : m_reg (reg), m_decl (decl)
2609   {}
2610 
2611   const char *get_kind () const FINAL OVERRIDE
2612   {
2613     return "write_to_const_diagnostic";
2614   }
2615 
2616   bool operator== (const write_to_const_diagnostic &other) const
2617   {
2618     return (m_reg == other.m_reg
2619 	    && m_decl == other.m_decl);
2620   }
2621 
2622   int get_controlling_option () const FINAL OVERRIDE
2623   {
2624     return OPT_Wanalyzer_write_to_const;
2625   }
2626 
2627   bool emit (rich_location *rich_loc) FINAL OVERRIDE
2628   {
2629     auto_diagnostic_group d;
2630     bool warned;
2631     switch (m_reg->get_kind ())
2632       {
2633       default:
2634 	warned = warning_at (rich_loc, get_controlling_option (),
2635 			     "write to %<const%> object %qE", m_decl);
2636 	break;
2637       case RK_FUNCTION:
2638 	warned = warning_at (rich_loc, get_controlling_option (),
2639 			     "write to function %qE", m_decl);
2640 	break;
2641       case RK_LABEL:
2642 	warned = warning_at (rich_loc, get_controlling_option (),
2643 			     "write to label %qE", m_decl);
2644 	break;
2645       }
2646     if (warned)
2647       inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2648     return warned;
2649   }
2650 
2651   label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2652   {
2653     switch (m_reg->get_kind ())
2654       {
2655       default:
2656 	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2657       case RK_FUNCTION:
2658 	return ev.formatted_print ("write to function %qE here", m_decl);
2659       case RK_LABEL:
2660 	return ev.formatted_print ("write to label %qE here", m_decl);
2661       }
2662   }
2663 
2664 private:
2665   const region *m_reg;
2666   tree m_decl;
2667 };
2668 
2669 /* A subclass of pending_diagnostic for complaining about writes to
2670    string literals.  */
2671 
2672 class write_to_string_literal_diagnostic
2673 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2674 {
2675 public:
2676   write_to_string_literal_diagnostic (const region *reg)
2677   : m_reg (reg)
2678   {}
2679 
2680   const char *get_kind () const FINAL OVERRIDE
2681   {
2682     return "write_to_string_literal_diagnostic";
2683   }
2684 
2685   bool operator== (const write_to_string_literal_diagnostic &other) const
2686   {
2687     return m_reg == other.m_reg;
2688   }
2689 
2690   int get_controlling_option () const FINAL OVERRIDE
2691   {
2692     return OPT_Wanalyzer_write_to_string_literal;
2693   }
2694 
2695   bool emit (rich_location *rich_loc) FINAL OVERRIDE
2696   {
2697     return warning_at (rich_loc, get_controlling_option (),
2698 		       "write to string literal");
2699     /* Ideally we would show the location of the STRING_CST as well,
2700        but it is not available at this point.  */
2701   }
2702 
2703   label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2704   {
2705     return ev.formatted_print ("write to string literal here");
2706   }
2707 
2708 private:
2709   const region *m_reg;
2710 };
2711 
2712 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */
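/* For example (a sketch), writes such as:

     const int g = 42;
     ... *(int *)&g = 1; ...

   can be reported via -Wanalyzer-write-to-const, and writes through a
   pointer to a string literal via -Wanalyzer-write-to-string-literal.  */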
2713 
2714 void
2715 region_model::check_for_writable_region (const region* dest_reg,
2716 					 region_model_context *ctxt) const
2717 {
2718   /* Fail gracefully if CTXT is NULL.  */
2719   if (!ctxt)
2720     return;
2721 
2722   const region *base_reg = dest_reg->get_base_region ();
2723   switch (base_reg->get_kind ())
2724     {
2725     default:
2726       break;
2727     case RK_FUNCTION:
2728       {
2729 	const function_region *func_reg = as_a <const function_region *> (base_reg);
2730 	tree fndecl = func_reg->get_fndecl ();
2731 	ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
2732       }
2733       break;
2734     case RK_LABEL:
2735       {
2736 	const label_region *label_reg = as_a <const label_region *> (base_reg);
2737 	tree label = label_reg->get_label ();
2738 	ctxt->warn (new write_to_const_diagnostic (label_reg, label));
2739       }
2740       break;
2741     case RK_DECL:
2742       {
2743 	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2744 	tree decl = decl_reg->get_decl ();
2745 	/* Warn about writes to const globals.
2746 	   Don't warn for writes to const locals, and params in particular,
2747 	   since we would warn in push_frame when setting them up (e.g. the
2748 	   "this" param is "T* const").  */
2749 	if (TREE_READONLY (decl)
2750 	    && is_global_var (decl))
2751 	  ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
2752       }
2753       break;
2754     case RK_STRING:
2755       ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
2756       break;
2757     }
2758 }
2759 
2760 /* Get the capacity of REG in bytes.  */
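/* For example (a sketch), for:

     char buf[64];

   the capacity of the region for "buf" is the constant 64, via
   decl_init_size; for heap-allocated regions the capacity typically
   comes from the dynamic extents recorded at the allocation site.  */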
2761 
2762 const svalue *
2763 region_model::get_capacity (const region *reg) const
2764 {
2765   switch (reg->get_kind ())
2766     {
2767     default:
2768       break;
2769     case RK_DECL:
2770       {
2771 	const decl_region *decl_reg = as_a <const decl_region *> (reg);
2772 	tree decl = decl_reg->get_decl ();
2773 	if (TREE_CODE (decl) == SSA_NAME)
2774 	  {
2775 	    tree type = TREE_TYPE (decl);
2776 	    tree size = TYPE_SIZE (type);
2777 	    return get_rvalue (size, NULL);
2778 	  }
2779 	else
2780 	  {
2781 	    tree size = decl_init_size (decl, false);
2782 	    if (size)
2783 	      return get_rvalue (size, NULL);
2784 	  }
2785       }
2786       break;
2787     case RK_SIZED:
2788       /* Look through sized regions to get at the capacity
2789 	 of the underlying regions.  */
2790       return get_capacity (reg->get_parent_region ());
2791     }
2792 
2793   if (const svalue *recorded = get_dynamic_extents (reg))
2794     return recorded;
2795 
2796   return m_mgr->get_or_create_unknown_svalue (sizetype);
2797 }
2798 
2799 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2800    using DIR to determine if this access is a read or write.  */
2801 
2802 void
2803 region_model::check_region_access (const region *reg,
2804 				   enum access_direction dir,
2805 				   region_model_context *ctxt) const
2806 {
2807   /* Fail gracefully if CTXT is NULL.  */
2808   if (!ctxt)
2809     return;
2810 
2811   check_region_for_taint (reg, dir, ctxt);
2812 
2813   switch (dir)
2814     {
2815     default:
2816       gcc_unreachable ();
2817     case DIR_READ:
2818       /* Currently a no-op.  */
2819       break;
2820     case DIR_WRITE:
2821       check_for_writable_region (reg, ctxt);
2822       break;
2823     }
2824 }
2825 
2826 /* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */
2827 
2828 void
2829 region_model::check_region_for_write (const region *dest_reg,
2830 				      region_model_context *ctxt) const
2831 {
2832   check_region_access (dest_reg, DIR_WRITE, ctxt);
2833 }
2834 
2835 /* If CTXT is non-NULL, use it to warn about any problems reading from REG.  */
2836 
2837 void
2838 region_model::check_region_for_read (const region *src_reg,
2839 				     region_model_context *ctxt) const
2840 {
2841   check_region_access (src_reg, DIR_READ, ctxt);
2842 }
2843 
2844 /* Set the value of the region given by LHS_REG to the value given
2845    by RHS_SVAL.
2846    Use CTXT to report any warnings associated with writing to LHS_REG.  */
2847 
2848 void
2849 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
2850 			 region_model_context *ctxt)
2851 {
2852   gcc_assert (lhs_reg);
2853   gcc_assert (rhs_sval);
2854 
2855   check_region_for_write (lhs_reg, ctxt);
2856 
2857   m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
2858 		     ctxt ? ctxt->get_uncertainty () : NULL);
2859 }
2860 
2861 /* Set the value of the region given by LHS to the value given by RHS.  */
2862 
2863 void
2864 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
2865 {
2866   const region *lhs_reg = get_lvalue (lhs, ctxt);
2867   const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2868   gcc_assert (lhs_reg);
2869   gcc_assert (rhs_sval);
2870   set_value (lhs_reg, rhs_sval, ctxt);
2871 }
2872 
2873 /* Remove all bindings overlapping REG within the store.  */
2874 
2875 void
2876 region_model::clobber_region (const region *reg)
2877 {
2878   m_store.clobber_region (m_mgr->get_store_manager(), reg);
2879 }
2880 
2881 /* Remove any bindings for REG within the store.  */
2882 
2883 void
2884 region_model::purge_region (const region *reg)
2885 {
2886   m_store.purge_region (m_mgr->get_store_manager(), reg);
2887 }
2888 
2889 /* Fill REG with SVAL.  */
2890 
2891 void
2892 region_model::fill_region (const region *reg, const svalue *sval)
2893 {
2894   m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
2895 }
2896 
2897 /* Zero-fill REG.  */
2898 
2899 void
2900 region_model::zero_fill_region (const region *reg)
2901 {
2902   m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
2903 }
2904 
2905 /* Mark REG as having unknown content.  */
2906 
2907 void
2908 region_model::mark_region_as_unknown (const region *reg,
2909 				      uncertainty_t *uncertainty)
2910 {
2911   m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
2912 				  uncertainty);
2913 }
2914 
2915 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
2916    this model.  */
2917 
2918 tristate
2919 region_model::eval_condition (const svalue *lhs,
2920 			       enum tree_code op,
2921 			       const svalue *rhs) const
2922 {
2923   /* For now, make no attempt to capture constraints on floating-point
2924      values.  */
2925   if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2926       || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2927     return tristate::unknown ();
2928 
2929   tristate ts = eval_condition_without_cm (lhs, op, rhs);
2930   if (ts.is_known ())
2931     return ts;
2932 
2933   /* Otherwise, try constraints.  */
2934   return m_constraints->eval_condition (lhs, op, rhs);
2935 }
2936 
2937 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
2938    this model, without resorting to the constraint_manager.
2939 
2940    This is exposed so that impl_region_model_context::on_state_leak can
2941    check for equality part-way through region_model::purge_unused_svalues
2942    without risking creating new ECs.  */
2943 
2944 tristate
2945 region_model::eval_condition_without_cm (const svalue *lhs,
2946 					  enum tree_code op,
2947 					  const svalue *rhs) const
2948 {
2949   gcc_assert (lhs);
2950   gcc_assert (rhs);
2951 
2952   /* See what we know based on the values.  */
2953 
2954   /* For now, make no attempt to capture constraints on floating-point
2955      values.  */
2956   if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2957       || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2958     return tristate::unknown ();
2959 
2960   /* Unwrap any unmergeable values.  */
2961   lhs = lhs->unwrap_any_unmergeable ();
2962   rhs = rhs->unwrap_any_unmergeable ();
2963 
2964   if (lhs == rhs)
2965     {
2966       /* If we have the same svalue, then we have equality
2967 	 (apart from NaN-handling).
2968 	 TODO: should this definitely be the case for poisoned values?  */
2969       /* Poisoned and unknown values are "unknowable".  */
2970       if (lhs->get_kind () == SK_POISONED
2971 	  || lhs->get_kind () == SK_UNKNOWN)
2972 	return tristate::TS_UNKNOWN;
2973 
2974       switch (op)
2975 	{
2976 	case EQ_EXPR:
2977 	case GE_EXPR:
2978 	case LE_EXPR:
2979 	  return tristate::TS_TRUE;
2980 
2981 	case NE_EXPR:
2982 	case GT_EXPR:
2983 	case LT_EXPR:
2984 	  return tristate::TS_FALSE;
2985 
2986 	default:
2987 	  /* For other ops, use the logic below.  */
2988 	  break;
2989 	}
2990     }
2991 
2992   /* If we have a pair of region_svalues, compare them.  */
2993   if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2994     if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2995       {
2996 	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
2997 	if (res.is_known ())
2998 	  return res;
2999 	/* Otherwise, only known through constraints.  */
3000       }
3001 
3002   if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3003     {
3004       /* If we have a pair of constants, compare them.  */
3005       if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3006 	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3007       else
3008 	{
3009 	  /* When we have one constant, put it on the RHS.  */
3010 	  std::swap (lhs, rhs);
3011 	  op = swap_tree_comparison (op);
3012 	}
3013     }
3014   gcc_assert (lhs->get_kind () != SK_CONSTANT);
3015 
3016   /* Handle comparison against zero.  */
3017   if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3018     if (zerop (cst_rhs->get_constant ()))
3019       {
3020 	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3021 	  {
3022 	    /* A region_svalue is a non-NULL pointer, except in certain
3023 	       special cases (see the comment for region::non_null_p).  */
3024 	    const region *pointee = ptr->get_pointee ();
3025 	    if (pointee->non_null_p ())
3026 	      {
3027 		switch (op)
3028 		  {
3029 		  default:
3030 		    gcc_unreachable ();
3031 
3032 		  case EQ_EXPR:
3033 		  case GE_EXPR:
3034 		  case LE_EXPR:
3035 		    return tristate::TS_FALSE;
3036 
3037 		  case NE_EXPR:
3038 		  case GT_EXPR:
3039 		  case LT_EXPR:
3040 		    return tristate::TS_TRUE;
3041 		  }
3042 	      }
3043 	  }
3044 	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3045 	  {
3046 	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
3047 	       isn't strictly true, in that eventually ptr++ will wrap
3048 	       around and be NULL, but it won't occur in practice and thus
3049 	       can be used to suppress effectively false positives that we
3050 	       shouldn't warn for.  */
3051 	    if (binop->get_op () == POINTER_PLUS_EXPR)
3052 	      {
3053 		tristate lhs_ts
3054 		  = eval_condition_without_cm (binop->get_arg0 (),
3055 					       op, rhs);
3056 		if (lhs_ts.is_known ())
3057 		  return lhs_ts;
3058 	      }
3059 	  }
3060 	else if (const unaryop_svalue *unaryop
3061 		   = lhs->dyn_cast_unaryop_svalue ())
3062 	  {
3063 	    if (unaryop->get_op () == NEGATE_EXPR)
3064 	      {
3065 		/* e.g. "-X <= 0" is equivalent to "X >= 0".  */
3066 		tristate lhs_ts = eval_condition (unaryop->get_arg (),
3067 						  swap_tree_comparison (op),
3068 						  rhs);
3069 		if (lhs_ts.is_known ())
3070 		  return lhs_ts;
3071 	      }
3072 	  }
3073       }
3074 
3075   /* Handle rejection of equality for comparisons of the initial values of
3076      "external" values (such as params) with the address of locals.  */
3077   if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3078     if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3079       {
3080 	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3081 	if (res.is_known ())
3082 	  return res;
3083       }
3084   if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3085     if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3086       {
3087 	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3088 	if (res.is_known ())
3089 	  return res;
3090       }
3091 
3092   if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3093     if (tree rhs_cst = rhs->maybe_get_constant ())
3094       {
3095 	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3096 	if (res.is_known ())
3097 	  return res;
3098       }
3099 
3100   return tristate::TS_UNKNOWN;
3101 }
3102 
3103 /* Subroutine of region_model::eval_condition_without_cm, for rejecting
3104    equality of INIT_VAL(PARM) with &LOCAL.  */
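/* For example (a sketch), in:

     void test (int *p)
     {
       int local;
       if (p == &local)
	 ...

   INIT_VAL(p) cannot equal "&local", since the caller cannot know the
   address of this frame's local, so the condition is known false.  */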
3105 
3106 tristate
3107 region_model::compare_initial_and_pointer (const initial_svalue *init,
3108 					    const region_svalue *ptr) const
3109 {
3110   const region *pointee = ptr->get_pointee ();
3111 
3112   /* If we have a pointer to something within a stack frame, it can't be the
3113      initial value of a param.  */
3114   if (pointee->maybe_get_frame_region ())
3115     if (init->initial_value_of_param_p ())
3116       return tristate::TS_FALSE;
3117 
3118   return tristate::TS_UNKNOWN;
3119 }
3120 
3121 /* Handle various constraints of the form:
3122      LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
3123      OP : == or !=
3124      RHS: zero
3125    and (with a cast):
3126      LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3127      OP : == or !=
3128      RHS: zero
3129    by adding constraints for INNER_LHS INNEROP INNER_RHS.
3130 
3131    Return true if this function can fully handle the constraint; if
3132    so, add the implied constraint(s) and write true to *OUT if they
3133    are consistent with existing constraints, or write false to *OUT
3134    if they contradict existing constraints.
3135 
3136    Return false for cases that this function doesn't know how to handle.
3137 
3138    For example, if we're checking a stored conditional, we'll have
3139    something like:
3140      LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3141      OP : NE_EXPR
3142      RHS: zero
3143    which this function can turn into an add_constraint of:
3144      (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3145 
3146    Similarly, optimized && and || conditionals lead to e.g.
3147      if (p && q)
3148    becoming gimple like this:
3149      _1 = p_6 == 0B;
3150      _2 = q_8 == 0B
3151      _3 = _1 | _2
3152    On the "_3 is false" branch we can have constraints of the form:
3153      ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3154       | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3155      == 0
3156    which implies that both _1 and _2 are false,
3157    which this function can turn into a pair of add_constraints of
3158      (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3159    and:
3160      (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */
3161 
3162 bool
3163 region_model::add_constraints_from_binop (const svalue *outer_lhs,
3164 					  enum tree_code outer_op,
3165 					  const svalue *outer_rhs,
3166 					  bool *out,
3167 					  region_model_context *ctxt)
3168 {
3169   while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3170     outer_lhs = cast;
3171   const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3172   if (!binop_sval)
3173     return false;
3174   if (!outer_rhs->all_zeroes_p ())
3175     return false;
3176 
3177   const svalue *inner_lhs = binop_sval->get_arg0 ();
3178   enum tree_code inner_op = binop_sval->get_op ();
3179   const svalue *inner_rhs = binop_sval->get_arg1 ();
3180 
3181   if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3182     return false;
3183 
3184   /* We have either
3185      - "OUTER_LHS != false" (i.e. OUTER is true), or
3186      - "OUTER_LHS == false" (i.e. OUTER is false).  */
3187   bool is_true = outer_op == NE_EXPR;
3188 
3189   switch (inner_op)
3190     {
3191     default:
3192       return false;
3193 
3194     case EQ_EXPR:
3195     case NE_EXPR:
3196       {
3197 	/* ...and "(inner_lhs OP inner_rhs) == 0"
3198 	   then (inner_lhs OP inner_rhs) must have the same
3199 	   logical value as LHS.  */
3200 	if (!is_true)
3201 	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3202 	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3203 	return true;
3204       }
3205       break;
3206 
3207     case BIT_AND_EXPR:
3208       if (is_true)
3209 	{
3210 	  /* ...and "(inner_lhs & inner_rhs) != 0"
3211 	     then both inner_lhs and inner_rhs must be true.  */
3212 	  const svalue *false_sval
3213 	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3214 	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3215 	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3216 	  *out = sat1 && sat2;
3217 	  return true;
3218 	}
3219       return false;
3220 
3221     case BIT_IOR_EXPR:
3222       if (!is_true)
3223 	{
3224 	  /* ...and "(inner_lhs | inner_rhs) == 0"
3225 	     i.e. "(inner_lhs | inner_rhs)" is false
3226 	     then both inner_lhs and inner_rhs must be false.  */
3227 	  const svalue *false_sval
3228 	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3229 	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3230 	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3231 	  *out = sat1 && sat2;
3232 	  return true;
3233 	}
3234       return false;
3235     }
3236 }
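
/* As a further illustration of the BIT_AND_EXPR case above (a
   hypothetical sketch), a short-circuit test such as:
     if (p != NULL && q != NULL)
   can be optimized to gimple of the form:
     _1 = p_6 != 0B;
     _2 = q_8 != 0B;
     _3 = _1 & _2;
   so that on the "_3 is true" branch we see "(_1 & _2) != 0", from
   which the function above adds both constraints:
     (p_6 != 0B) and (q_8 != 0B).  */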
3237 
3238 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3239    If it is consistent with existing constraints, add it, and return true.
3240    Return false if it contradicts existing constraints.
3241    Use CTXT for reporting any diagnostics associated with the accesses.  */
3242 
3243 bool
3244 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3245 			      region_model_context *ctxt)
3246 {
3247   /* For now, make no attempt to capture constraints on floating-point
3248      values.  */
3249   if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3250     return true;
3251 
3252   const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3253   const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3254 
3255   return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3256 }
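
/* For example, a caller might narrow a model as follows (a hypothetical
   sketch; "x" stands for a tree of integer type, and compare the
   selftests later in this file):

     region_model_manager mgr;
     region_model model (&mgr);
     tree int_0 = build_int_cst (integer_type_node, 0);
     tree int_3 = build_int_cst (integer_type_node, 3);
     model.add_constraint (x, GT_EXPR, int_3, NULL);   => returns true
     model.add_constraint (x, EQ_EXPR, int_0, NULL);   => returns false

   where the second call fails because "x == 0" contradicts the
   already-recorded "x > 3".  */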
3257 
3258 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3259    If it is consistent with existing constraints, add it, and return true.
3260    Return false if it contradicts existing constraints.
3261    Use CTXT for reporting any diagnostics associated with the accesses.  */
3262 
3263 bool
3264 region_model::add_constraint (const svalue *lhs,
3265 			      enum tree_code op,
3266 			      const svalue *rhs,
3267 			      region_model_context *ctxt)
3268 {
3269   tristate t_cond = eval_condition (lhs, op, rhs);
3270 
3271   /* If we already have the condition, do nothing.  */
3272   if (t_cond.is_true ())
3273     return true;
3274 
3275   /* Reject a constraint that would contradict existing knowledge, as
3276      unsatisfiable.  */
3277   if (t_cond.is_false ())
3278     return false;
3279 
3280   bool out;
3281   if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3282     return out;
3283 
3284   /* Attempt to store the constraint.  */
3285   if (!m_constraints->add_constraint (lhs, op, rhs))
3286     return false;
3287 
3288   /* Notify the context, if any.  This exists so that the state machines
3289      in a program_state can be notified about the condition, and so can
3290      set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3291      when synthesizing constraints as above.  */
3292   if (ctxt)
3293     ctxt->on_condition (lhs, op, rhs);
3294 
3295   /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3296      the case where REGION is heap-allocated and thus could be NULL).  */
3297   if (tree rhs_cst = rhs->maybe_get_constant ())
3298     if (op == EQ_EXPR && zerop (rhs_cst))
3299       if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3300 	unset_dynamic_extents (region_sval->get_pointee ());
3301 
3302   return true;
3303 }
3304 
3305 /* As above, but when returning false, if OUT is non-NULL, write a
3306    new rejected_constraint to *OUT.  */
3307 
3308 bool
3309 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3310 			      region_model_context *ctxt,
3311 			      rejected_constraint **out)
3312 {
3313   bool sat = add_constraint (lhs, op, rhs, ctxt);
3314   if (!sat && out)
3315     *out = new rejected_op_constraint (*this, lhs, op, rhs);
3316   return sat;
3317 }
3318 
3319 /* Determine what is known about the condition "LHS OP RHS" within
3320    this model.
3321    Use CTXT for reporting any diagnostics associated with the accesses.  */
3322 
3323 tristate
3324 region_model::eval_condition (tree lhs,
3325 			      enum tree_code op,
3326 			      tree rhs,
3327 			      region_model_context *ctxt)
3328 {
3329   /* For now, make no attempt to model constraints on floating-point
3330      values.  */
3331   if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3332     return tristate::unknown ();
3333 
3334   return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3335 }
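
/* For instance, in a model already constrained by "x > 3" (a
   hypothetical sketch):
     eval_condition (x, GT_EXPR, int_0)  => true
     eval_condition (x, EQ_EXPR, int_0)  => false
     eval_condition (x, EQ_EXPR, int_5)  => unknown
   the final case being unknown since "x" could be e.g. 4 or 5.  */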
3336 
3337 /* Implementation of region_model::get_representative_path_var.
3338    Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3339    Use VISITED to prevent infinite mutual recursion with the overload for
3340    regions.  */
3341 
3342 path_var
3343 region_model::get_representative_path_var_1 (const svalue *sval,
3344 					     svalue_set *visited) const
3345 {
3346   gcc_assert (sval);
3347 
3348   /* Prevent infinite recursion.  */
3349   if (visited->contains (sval))
3350     return path_var (NULL_TREE, 0);
3351   visited->add (sval);
3352 
3353   /* Handle casts by recursion into get_representative_path_var.  */
3354   if (const svalue *cast_sval = sval->maybe_undo_cast ())
3355     {
3356       path_var result = get_representative_path_var (cast_sval, visited);
3357       tree orig_type = sval->get_type ();
3358       /* If necessary, wrap the result in a cast.  */
3359       if (result.m_tree && orig_type)
3360 	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3361       return result;
3362     }
3363 
3364   auto_vec<path_var> pvs;
3365   m_store.get_representative_path_vars (this, visited, sval, &pvs);
3366 
3367   if (tree cst = sval->maybe_get_constant ())
3368     pvs.safe_push (path_var (cst, 0));
3369 
3370   /* Handle string literals and various other pointers.  */
3371   if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3372     {
3373       const region *reg = ptr_sval->get_pointee ();
3374       if (path_var pv = get_representative_path_var (reg, visited))
3375 	return path_var (build1 (ADDR_EXPR,
3376 				 sval->get_type (),
3377 				 pv.m_tree),
3378 			 pv.m_stack_depth);
3379     }
3380 
3381   /* If we have a sub_svalue, look for ways to represent the parent.  */
3382   if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
3383     {
3384       const svalue *parent_sval = sub_sval->get_parent ();
3385       const region *subreg = sub_sval->get_subregion ();
3386       if (path_var parent_pv
3387 	    = get_representative_path_var (parent_sval, visited))
3388 	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3389 	  return path_var (build3 (COMPONENT_REF,
3390 				   sval->get_type (),
3391 				   parent_pv.m_tree,
3392 				   field_reg->get_field (),
3393 				   NULL_TREE),
3394 			   parent_pv.m_stack_depth);
3395     }
3396 
3397   /* Handle binops.  */
3398   if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3399     if (path_var lhs_pv
3400 	= get_representative_path_var (binop_sval->get_arg0 (), visited))
3401       if (path_var rhs_pv
3402 	  = get_representative_path_var (binop_sval->get_arg1 (), visited))
3403 	return path_var (build2 (binop_sval->get_op (),
3404 				 sval->get_type (),
3405 				 lhs_pv.m_tree, rhs_pv.m_tree),
3406 			 lhs_pv.m_stack_depth);
3407 
3408   if (pvs.length () < 1)
3409     return path_var (NULL_TREE, 0);
3410 
3411   pvs.qsort (readability_comparator);
3412   return pvs[0];
3413 }
3414 
3415 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3416    Use VISITED to prevent infinite mutual recursion with the overload for
3417    regions.
3418 
3419    This function defers to get_representative_path_var_1 to do the work;
3420    it adds verification that get_representative_path_var_1 returned a tree
3421    of the correct type.  */
3422 
3423 path_var
3424 region_model::get_representative_path_var (const svalue *sval,
3425 					   svalue_set *visited) const
3426 {
3427   if (sval == NULL)
3428     return path_var (NULL_TREE, 0);
3429 
3430   tree orig_type = sval->get_type ();
3431 
3432   path_var result = get_representative_path_var_1 (sval, visited);
3433 
3434   /* Verify that the result has the same type as SVAL, if any.  */
3435   if (result.m_tree && orig_type)
3436     gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3437 
3438   return result;
3439 }
3440 
3441 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3442 
3443    Strip off any top-level cast, to avoid messages like
3444      double-free of '(void *)ptr'
3445    from analyzer diagnostics.  */
3446 
3447 tree
3448 region_model::get_representative_tree (const svalue *sval) const
3449 {
3450   svalue_set visited;
3451   tree expr = get_representative_path_var (sval, &visited).m_tree;
3452 
3453   /* Strip off any top-level cast.  */
3454   if (expr && TREE_CODE (expr) == NOP_EXPR)
3455     expr = TREE_OPERAND (expr, 0);
3456 
3457   return fixup_tree_for_diagnostic (expr);
3458 }
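
/* For example (compare test_get_representative_tree below), if SVAL is
   the value bound to element 3 of a global array "a", this can recover
   the tree "a[3]"; likewise a value known only as "(void *)ptr" is
   reported simply as "ptr", thanks to the cast-stripping above.  */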
3459 
3460 /* Implementation of region_model::get_representative_path_var.
3461 
3462    Attempt to return a path_var that represents REG, or return
3463    the NULL path_var.
3464    For example, a region for a field of a local would be a path_var
3465    wrapping a COMPONENT_REF.
3466    Use VISITED to prevent infinite mutual recursion with the overload for
3467    svalues.  */
3468 
3469 path_var
3470 region_model::get_representative_path_var_1 (const region *reg,
3471 					     svalue_set *visited) const
3472 {
3473   switch (reg->get_kind ())
3474     {
3475     default:
3476       gcc_unreachable ();
3477 
3478     case RK_FRAME:
3479     case RK_GLOBALS:
3480     case RK_CODE:
3481     case RK_HEAP:
3482     case RK_STACK:
3483     case RK_ROOT:
3484        /* Regions that represent memory spaces are not expressible as trees.  */
3485       return path_var (NULL_TREE, 0);
3486 
3487     case RK_FUNCTION:
3488       {
3489 	const function_region *function_reg
3490 	  = as_a <const function_region *> (reg);
3491 	return path_var (function_reg->get_fndecl (), 0);
3492       }
3493     case RK_LABEL:
3494       {
3495 	const label_region *label_reg = as_a <const label_region *> (reg);
3496 	return path_var (label_reg->get_label (), 0);
3497       }
3498 
3499     case RK_SYMBOLIC:
3500       {
3501 	const symbolic_region *symbolic_reg
3502 	  = as_a <const symbolic_region *> (reg);
3503 	const svalue *pointer = symbolic_reg->get_pointer ();
3504 	path_var pointer_pv = get_representative_path_var (pointer, visited);
3505 	if (!pointer_pv)
3506 	  return path_var (NULL_TREE, 0);
3507 	tree offset = build_int_cst (pointer->get_type (), 0);
3508 	return path_var (build2 (MEM_REF,
3509 				 reg->get_type (),
3510 				 pointer_pv.m_tree,
3511 				 offset),
3512 			 pointer_pv.m_stack_depth);
3513       }
3514     case RK_DECL:
3515       {
3516 	const decl_region *decl_reg = as_a <const decl_region *> (reg);
3517 	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3518       }
3519     case RK_FIELD:
3520       {
3521 	const field_region *field_reg = as_a <const field_region *> (reg);
3522 	path_var parent_pv
3523 	  = get_representative_path_var (reg->get_parent_region (), visited);
3524 	if (!parent_pv)
3525 	  return path_var (NULL_TREE, 0);
3526 	return path_var (build3 (COMPONENT_REF,
3527 				 reg->get_type (),
3528 				 parent_pv.m_tree,
3529 				 field_reg->get_field (),
3530 				 NULL_TREE),
3531 			 parent_pv.m_stack_depth);
3532       }
3533 
3534     case RK_ELEMENT:
3535       {
3536 	const element_region *element_reg
3537 	  = as_a <const element_region *> (reg);
3538 	path_var parent_pv
3539 	  = get_representative_path_var (reg->get_parent_region (), visited);
3540 	if (!parent_pv)
3541 	  return path_var (NULL_TREE, 0);
3542 	path_var index_pv
3543 	  = get_representative_path_var (element_reg->get_index (), visited);
3544 	if (!index_pv)
3545 	  return path_var (NULL_TREE, 0);
3546 	return path_var (build4 (ARRAY_REF,
3547 				 reg->get_type (),
3548 				 parent_pv.m_tree, index_pv.m_tree,
3549 				 NULL_TREE, NULL_TREE),
3550 			 parent_pv.m_stack_depth);
3551       }
3552 
3553     case RK_OFFSET:
3554       {
3555 	const offset_region *offset_reg
3556 	  = as_a <const offset_region *> (reg);
3557 	path_var parent_pv
3558 	  = get_representative_path_var (reg->get_parent_region (), visited);
3559 	if (!parent_pv)
3560 	  return path_var (NULL_TREE, 0);
3561 	path_var offset_pv
3562 	  = get_representative_path_var (offset_reg->get_byte_offset (),
3563 					 visited);
3564 	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
3565 	  return path_var (NULL_TREE, 0);
3566 	tree addr_parent = build1 (ADDR_EXPR,
3567 				   build_pointer_type (reg->get_type ()),
3568 				   parent_pv.m_tree);
3569 	return path_var (build2 (MEM_REF,
3570 				 reg->get_type (),
3571 				 addr_parent, offset_pv.m_tree),
3572 			 parent_pv.m_stack_depth);
3573       }
3574 
3575     case RK_SIZED:
3576       return path_var (NULL_TREE, 0);
3577 
3578     case RK_CAST:
3579       {
3580 	path_var parent_pv
3581 	  = get_representative_path_var (reg->get_parent_region (), visited);
3582 	if (!parent_pv)
3583 	  return path_var (NULL_TREE, 0);
3584 	return path_var (build1 (NOP_EXPR,
3585 				 reg->get_type (),
3586 				 parent_pv.m_tree),
3587 			 parent_pv.m_stack_depth);
3588       }
3589 
3590     case RK_HEAP_ALLOCATED:
3591     case RK_ALLOCA:
3592       /* No good way to express heap-allocated/alloca regions as trees.  */
3593       return path_var (NULL_TREE, 0);
3594 
3595     case RK_STRING:
3596       {
3597 	const string_region *string_reg = as_a <const string_region *> (reg);
3598 	return path_var (string_reg->get_string_cst (), 0);
3599       }
3600 
3601     case RK_UNKNOWN:
3602       return path_var (NULL_TREE, 0);
3603     }
3604 }
3605 
3606 /* Attempt to return a path_var that represents REG, or return
3607    the NULL path_var.
3608    For example, a region for a field of a local would be a path_var
3609    wrapping a COMPONENT_REF.
3610    Use VISITED to prevent infinite mutual recursion with the overload for
3611    svalues.
3612 
3613    This function defers to get_representative_path_var_1 to do the work;
3614    it adds verification that get_representative_path_var_1 returned a tree
3615    of the correct type.  */
3616 
3617 path_var
3618 region_model::get_representative_path_var (const region *reg,
3619 					   svalue_set *visited) const
3620 {
3621   path_var result = get_representative_path_var_1 (reg, visited);
3622 
3623   /* Verify that the result has the same type as REG, if any.  */
3624   if (result.m_tree && reg->get_type ())
3625     gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3626 
3627   return result;
3628 }
3629 
3630 /* Update this model for any phis in SNODE, assuming we came from
3631    LAST_CFG_SUPEREDGE.  */
3632 
3633 void
3634 region_model::update_for_phis (const supernode *snode,
3635 			       const cfg_superedge *last_cfg_superedge,
3636 			       region_model_context *ctxt)
3637 {
3638   gcc_assert (last_cfg_superedge);
3639 
3640   /* Copy this state and pass it to handle_phi so that all of the phi stmts
3641      are effectively handled simultaneously.  */
3642   const region_model old_state (*this);
3643 
3644   for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3645        !gsi_end_p (gpi); gsi_next (&gpi))
3646     {
3647       gphi *phi = gpi.phi ();
3648 
3649       tree src = last_cfg_superedge->get_phi_arg (phi);
3650       tree lhs = gimple_phi_result (phi);
3651 
3652       /* Update next_state based on phi and old_state.  */
3653       handle_phi (phi, lhs, src, old_state, ctxt);
3654     }
3655 }
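
/* For example, at a CFG join point with:
     # x_5 = PHI <x_3(2), x_4(3)>
   arriving via the superedge from basic block 2 binds x_5 to the value
   that x_3 has in OLD_STATE.  Copying the state first means that one
   phi's destination cannot clobber another phi's source within the
   same block.  */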
3656 
3657 /* Attempt to update this model for taking EDGE (where the last statement
3658    was LAST_STMT), returning true if the edge can be taken, false
3659    otherwise.
3660    When returning false, if OUT is non-NULL, write a new rejected_constraint
3661    to it.
3662 
3663    For CFG superedges where LAST_STMT is a conditional or a switch
3664    statement, attempt to add the relevant conditions for EDGE to this
3665    model, returning true if they are feasible, or false if they are
3666    impossible.
3667 
3668    For call superedges, push frame information and store arguments
3669    into parameters.
3670 
3671    For return superedges, pop frame information and store return
3672    values into any lhs.
3673 
3674    Rejection of call/return superedges happens elsewhere, in
3675    program_point::on_edge (i.e. based on program point, rather
3676    than program state).  */
3677 
3678 bool
3679 region_model::maybe_update_for_edge (const superedge &edge,
3680 				     const gimple *last_stmt,
3681 				     region_model_context *ctxt,
3682 				     rejected_constraint **out)
3683 {
3684   /* Handle frame updates for interprocedural edges.  */
3685   switch (edge.m_kind)
3686     {
3687     default:
3688       break;
3689 
3690     case SUPEREDGE_CALL:
3691       {
3692 	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3693 	update_for_call_superedge (*call_edge, ctxt);
3694       }
3695       break;
3696 
3697     case SUPEREDGE_RETURN:
3698       {
3699 	const return_superedge *return_edge
3700 	  = as_a <const return_superedge *> (&edge);
3701 	update_for_return_superedge (*return_edge, ctxt);
3702       }
3703       break;
3704 
3705     case SUPEREDGE_INTRAPROCEDURAL_CALL:
3706       {
3707 	const callgraph_superedge *cg_sedge
3708 	  = as_a <const callgraph_superedge *> (&edge);
3709 	update_for_call_summary (*cg_sedge, ctxt);
3710       }
3711       break;
3712     }
3713 
3714   if (last_stmt == NULL)
3715     return true;
3716 
3717   /* Apply any constraints for conditionals/switch statements.  */
3718 
3719   if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3720     {
3721       const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
3722       return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
3723     }
3724 
3725   if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3726     {
3727       const switch_cfg_superedge *switch_sedge
3728 	= as_a <const switch_cfg_superedge *> (&edge);
3729       return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3730 					    ctxt, out);
3731     }
3732 
3733   /* Apply any constraints due to an exception being thrown.  */
3734   if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3735     if (cfg_sedge->get_flags () & EDGE_EH)
3736       return apply_constraints_for_exception (last_stmt, ctxt, out);
3737 
3738   return true;
3739 }
3740 
3741 /* Push a new frame_region on to the stack region.
3742    Populate the frame_region with child regions for the function call's
3743    parameters, using values from the arguments at the callsite in the
3744    caller's frame.  */
3745 
3746 void
3747 region_model::update_for_gcall (const gcall *call_stmt,
3748 				region_model_context *ctxt,
3749 				function *callee)
3750 {
3751   /* Build a vec of argument svalues, using the current top
3752      frame for resolving tree expressions.  */
3753   auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
3754 
3755   for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3756     {
3757       tree arg = gimple_call_arg (call_stmt, i);
3758       arg_svals.quick_push (get_rvalue (arg, ctxt));
3759     }
3760 
3761   if (!callee)
3762     {
3763       /* Get the function * from the gcall.  */
3764       tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
3765       callee = DECL_STRUCT_FUNCTION (fn_decl);
3766     }
3767 
3768   push_frame (callee, &arg_svals, ctxt);
3769 }
3770 
3771 /* Pop the top-most frame_region from the stack, and copy the return
3772    region's values (if any) into the region for the lvalue of the LHS of
3773    the call (if any).  */
3774 
3775 void
3776 region_model::update_for_return_gcall (const gcall *call_stmt,
3777 				       region_model_context *ctxt)
3778 {
3779   /* Get the lvalue for the result of the call, passing it to pop_frame,
3780      so that pop_frame can determine the region with respect to the
3781      *caller* frame.  */
3782   tree lhs = gimple_call_lhs (call_stmt);
3783   pop_frame (lhs, NULL, ctxt);
3784 }
3785 
3786 /* Extract calling information from the superedge and update the model for the
3787    call.  */
3788 
3789 void
3790 region_model::update_for_call_superedge (const call_superedge &call_edge,
3791 					 region_model_context *ctxt)
3792 {
3793   const gcall *call_stmt = call_edge.get_call_stmt ();
3794   update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
3795 }
3796 
3797 /* Extract calling information from the return superedge and update the model
3798    for the returning call.  */
3799 
3800 void
3801 region_model::update_for_return_superedge (const return_superedge &return_edge,
3802 					   region_model_context *ctxt)
3803 {
3804   const gcall *call_stmt = return_edge.get_call_stmt ();
3805   update_for_return_gcall (call_stmt, ctxt);
3806 }
3807 
3808 /* Update this region_model with a summary of the effect of calling
3809    and returning from CG_SEDGE.
3810 
3811    TODO: Currently this is extremely simplistic: we merely set the
3812    return value to "unknown".  A proper implementation would e.g. update
3813    sm-state, and presumably be reworked to support multiple outcomes.  */
3814 
3815 void
3816 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3817 				       region_model_context *ctxt)
3818 {
3819   /* For now, set any return value to "unknown".  */
3820   const gcall *call_stmt = cg_sedge.get_call_stmt ();
3821   tree lhs = gimple_call_lhs (call_stmt);
3822   if (lhs)
3823     mark_region_as_unknown (get_lvalue (lhs, ctxt),
3824 			    ctxt ? ctxt->get_uncertainty () : NULL);
3825 
3826   // TODO: actually implement some kind of summary here
3827 }
3828 
3829 /* Given a true or false edge guarded by conditional statement COND_STMT,
3830    determine appropriate constraints for the edge to be taken.
3831 
3832    If they are feasible, add the constraints and return true.
3833 
3834    Return false if the constraints contradict existing knowledge
3835    (and so the edge should not be taken).
3836    When returning false, if OUT is non-NULL, write a new rejected_constraint
3837    to it.  */
3838 
3839 bool
3840 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3841 					   const gcond *cond_stmt,
3842 					   region_model_context *ctxt,
3843 					   rejected_constraint **out)
3844 {
3845   ::edge cfg_edge = sedge.get_cfg_edge ();
3846   gcc_assert (cfg_edge != NULL);
3847   gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3848 
3849   enum tree_code op = gimple_cond_code (cond_stmt);
3850   tree lhs = gimple_cond_lhs (cond_stmt);
3851   tree rhs = gimple_cond_rhs (cond_stmt);
3852   if (cfg_edge->flags & EDGE_FALSE_VALUE)
3853     op = invert_tree_comparison (op, false /* honor_nans */);
3854   return add_constraint (lhs, op, rhs, ctxt, out);
3855 }
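
/* For example, for gimple such as:
     if (x_2 > 3) goto <bb 4>; else goto <bb 5>;
   taking the edge to bb 4 adds the constraint "x_2 > 3", whereas taking
   the edge to bb 5 adds the inverted constraint "x_2 <= 3".  */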
3856 
3857 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3858    for the edge to be taken.
3859 
3860    If they are feasible, add the constraints and return true.
3861 
3862    Return false if the constraints contradict existing knowledge
3863    (and so the edge should not be taken).
3864    When returning false, if OUT is non-NULL, write a new rejected_constraint
3865    to it.  */
3866 
3867 bool
3868 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3869 					     const gswitch *switch_stmt,
3870 					     region_model_context *ctxt,
3871 					     rejected_constraint **out)
3872 {
3873   bounded_ranges_manager *ranges_mgr = get_range_manager ();
3874   const bounded_ranges *all_cases_ranges
3875     = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
3876   tree index = gimple_switch_index (switch_stmt);
3877   const svalue *index_sval = get_rvalue (index, ctxt);
3878   bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3879   if (!sat && out)
3880     *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3881   return sat;
3882 }
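
/* For example (a hypothetical sketch), for:
     switch (i)
       {
       case 1 ... 5:  [...]
       case 42:       [...]
       default:       [...]
       }
   the edge for "case 1 ... 5" constrains "i" to the range [1, 5],
   while the default edge constrains "i" to lie outside both [1, 5]
   and {42}.  */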
3883 
3884 /* Apply any constraints due to an exception being thrown at LAST_STMT.
3885 
3886    If they are feasible, add the constraints and return true.
3887 
3888    Return false if the constraints contradict existing knowledge
3889    (and so the edge should not be taken).
3890    When returning false, if OUT is non-NULL, write a new rejected_constraint
3891    to it.  */
3892 
3893 bool
3894 region_model::apply_constraints_for_exception (const gimple *last_stmt,
3895 					       region_model_context *ctxt,
3896 					       rejected_constraint **out)
3897 {
3898   gcc_assert (last_stmt);
3899   if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3900     if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3901       if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3902 	  || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3903 	{
3904 	  /* We have an exception thrown from operator new.
3905 	     Add a constraint that the result was NULL, to avoid a false
3906 	     leak report due to the result being lost when following
3907 	     the EH edge.  */
3908 	  if (tree lhs = gimple_call_lhs (call))
3909 	    return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
3910 	  return true;
3911 	}
3912   return true;
3913 }
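
/* For example (a hypothetical sketch), for C++ source such as:
     p = new widget ();
   if the allocation throws, the assignment to "p" never happens on the
   EH edge, so the freshly-allocated pointer would look leaked there;
   constraining the result to NULL on that path suppresses the spurious
   leak report.  */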
3914 
3915 /* For use with push_frame when handling a top-level call within the analysis.
3916    PARAM has a defined but unknown initial value.
3917    Anything it points to has escaped, since the calling context "knows"
3918    the pointer, and thus calls to unknown functions could read/write into
3919    the region.
3920    If NONNULL is true, then assume that PARAM must be non-NULL.  */
3921 
3922 void
3923 region_model::on_top_level_param (tree param,
3924 				  bool nonnull,
3925 				  region_model_context *ctxt)
3926 {
3927   if (POINTER_TYPE_P (TREE_TYPE (param)))
3928     {
3929       const region *param_reg = get_lvalue (param, ctxt);
3930       const svalue *init_ptr_sval
3931 	= m_mgr->get_or_create_initial_value (param_reg);
3932       const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3933       m_store.mark_as_escaped (pointee_reg);
3934       if (nonnull)
3935 	{
3936 	  const svalue *null_ptr_sval
3937 	    = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
3938 	  add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
3939 	}
3940     }
3941 }
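
/* For example, when analysis starts at this function (a hypothetical
   sketch):
     void foo (int *p);
   "p" gets a defined-but-unknown initial pointer value, "*p" is marked
   as having escaped (callers could alias or mutate it), and if NONNULL
   is set the constraint "p != NULL" is recorded up front.  */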
3942 
3943 /* Update this region_model to reflect pushing a frame onto the stack
3944    for a call to FUN.
3945 
3946    If ARG_SVALS is non-NULL, use it to populate the parameters
3947    in the new frame.
3948    Otherwise, the params have their initial_svalues.
3949 
3950    Return the frame_region for the new frame.  */
3951 
3952 const region *
3953 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
3954 			  region_model_context *ctxt)
3955 {
3956   m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
3957   if (arg_svals)
3958     {
3959       /* Arguments supplied from a caller frame.  */
3960       tree fndecl = fun->decl;
3961       unsigned idx = 0;
3962       for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3963 	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
3964 	{
3965 	  /* If there's a mismatching declaration, the call stmt might
3966 	     not have enough args.  Handle this case by leaving the
3967 	     rest of the params as uninitialized.  */
3968 	  if (idx >= arg_svals->length ())
3969 	    break;
3970 	  tree parm_lval = iter_parm;
3971 	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3972 	    parm_lval = parm_default_ssa;
3973 	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
3974 	  const svalue *arg_sval = (*arg_svals)[idx];
3975 	  set_value (parm_reg, arg_sval, ctxt);
3976 	}
3977     }
3978   else
3979     {
3980       /* Otherwise we have a top-level call within the analysis.  The params
3981 	 have defined but unknown initial values.
3982 	 Anything they point to has escaped.  */
3983       tree fndecl = fun->decl;
3984 
3985       /* Handle "__attribute__((nonnull))".   */
3986       tree fntype = TREE_TYPE (fndecl);
3987       bitmap nonnull_args = get_nonnull_args (fntype);
3988 
3989       unsigned parm_idx = 0;
3990       for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3991 	   iter_parm = DECL_CHAIN (iter_parm))
3992 	{
3993 	  bool non_null = (nonnull_args
3994 			   ? (bitmap_empty_p (nonnull_args)
3995 			      || bitmap_bit_p (nonnull_args, parm_idx))
3996 			   : false);
3997 	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3998 	    on_top_level_param (parm_default_ssa, non_null, ctxt);
3999 	  else
4000 	    on_top_level_param (iter_parm, non_null, ctxt);
4001 	  parm_idx++;
4002 	}
4003 
4004       BITMAP_FREE (nonnull_args);
4005     }
4006 
4007   return m_current_frame;
4008 }
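
/* Note the semantics of get_nonnull_args used above: for a declaration
   such as (a hypothetical sketch):
     extern void foo (int *a, int b, int *c)
       __attribute__ ((nonnull (1, 3)));
   the bitmap has bits 0 and 2 set, whereas a bare
   "__attribute__ ((nonnull))" yields an empty bitmap, meaning that all
   pointer arguments are to be treated as non-NULL.  */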
4009 
4010 /* Get the function of the top-most frame in this region_model's stack.
4011    There must be such a frame.  */
4012 
4013 function *
4014 region_model::get_current_function () const
4015 {
4016   const frame_region *frame = get_current_frame ();
4017   gcc_assert (frame);
4018   return frame->get_function ();
4019 }
4020 
4021 /* Pop the topmost frame_region from this region_model's stack.
4022 
4023    If RESULT_LVALUE is non-null, copy any return value from the frame
4024    into the corresponding region (evaluated with respect to the *caller*
4025    frame, rather than the called frame).
4026    If OUT_RESULT is non-null, copy any return value from the frame
4027    into *OUT_RESULT.
4028 
4029    If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
4030    This is for use when unwinding frames e.g. due to longjmp, to suppress
4031    erroneously reporting uninitialized return values.
4032 
4033    Purge the frame region and all its descendent regions.
4034    Convert any pointers that point into such regions into
4035    POISON_KIND_POPPED_STACK svalues.  */
4036 
4037 void
4038 region_model::pop_frame (tree result_lvalue,
4039 			 const svalue **out_result,
4040 			 region_model_context *ctxt,
4041 			 bool eval_return_svalue)
4042 {
4043   gcc_assert (m_current_frame);
4044 
4045   /* Evaluate the result, within the callee frame.  */
4046   const frame_region *frame_reg = m_current_frame;
4047   tree fndecl = m_current_frame->get_function ()->decl;
4048   tree result = DECL_RESULT (fndecl);
4049   const svalue *retval = NULL;
4050   if (result
4051       && TREE_TYPE (result) != void_type_node
4052       && eval_return_svalue)
4053     {
4054       retval = get_rvalue (result, ctxt);
4055       if (out_result)
4056 	*out_result = retval;
4057     }
4058 
4059   /* Pop the frame.  */
4060   m_current_frame = m_current_frame->get_calling_frame ();
4061 
4062   if (result_lvalue && retval)
4063     {
4064       gcc_assert (eval_return_svalue);
4065 
4066       /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4067 	 the frame, but before poisoning pointers into the old frame.  */
4068       const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4069       set_value (result_dst_reg, retval, ctxt);
4070     }
4071 
4072   unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
4073 }
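
/* For example (a hypothetical sketch), given:
     int *f (void) { int x = 42; return &x; }
   popping f's frame purges the cluster for "x", and any surviving
   pointer into the popped frame (such as the returned address stored
   into a caller's variable) becomes a POISON_KIND_POPPED_STACK svalue,
   so that later dereferences can be diagnosed as uses of a stale stack
   address.  */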
4074 
4075 /* Get the number of frames in this region_model's stack.  */
4076 
4077 int
4078 region_model::get_stack_depth () const
4079 {
4080   const frame_region *frame = get_current_frame ();
4081   if (frame)
4082     return frame->get_stack_depth ();
4083   else
4084     return 0;
4085 }
4086 
4087 /* Get the frame_region with the given index within the stack.
4088    The frame_region must exist.  */
4089 
4090 const frame_region *
4091 region_model::get_frame_at_index (int index) const
4092 {
4093   const frame_region *frame = get_current_frame ();
4094   gcc_assert (frame);
4095   gcc_assert (index >= 0);
4096   gcc_assert (index <= frame->get_index ());
4097   while (index != frame->get_index ())
4098     {
4099       frame = frame->get_calling_frame ();
4100       gcc_assert (frame);
4101     }
4102   return frame;
4103 }
4104 
4105 /* Unbind svalues for any regions in REG and below.
4106    Find any pointers to such regions; convert them to
4107    poisoned values of kind PKIND.
4108    Also purge any dynamic extents.  */
4109 
4110 void
4111 region_model::unbind_region_and_descendents (const region *reg,
4112 					     enum poison_kind pkind)
4113 {
4114   /* Gather a set of base regions to be unbound.  */
4115   hash_set<const region *> base_regs;
4116   for (store::cluster_map_t::iterator iter = m_store.begin ();
4117        iter != m_store.end (); ++iter)
4118     {
4119       const region *iter_base_reg = (*iter).first;
4120       if (iter_base_reg->descendent_of_p (reg))
4121 	base_regs.add (iter_base_reg);
4122     }
4123   for (hash_set<const region *>::iterator iter = base_regs.begin ();
4124        iter != base_regs.end (); ++iter)
4125     m_store.purge_cluster (*iter);
4126 
4127   /* Find any pointers to REG or its descendents; convert to poisoned.  */
4128   poison_any_pointers_to_descendents (reg, pkind);
4129 
4130   /* Purge dynamic extents of any base regions in REG and below
4131      (e.g. VLAs and alloca stack regions).  */
4132   for (auto iter : m_dynamic_extents)
4133     {
4134       const region *iter_reg = iter.first;
4135       if (iter_reg->descendent_of_p (reg))
4136 	unset_dynamic_extents (iter_reg);
4137     }
4138 }
4139 
4140 /* Implementation of BindingVisitor.
4141    Update the bound svalues for regions below REG to use poisoned
4142    values instead.  */
4143 
4144 struct bad_pointer_finder
4145 {
4146   bad_pointer_finder (const region *reg, enum poison_kind pkind,
4147 		      region_model_manager *mgr)
4148   : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4149   {}
4150 
4151   void on_binding (const binding_key *, const svalue *&sval)
4152   {
4153     if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4154       {
4155 	const region *ptr_dst = ptr_sval->get_pointee ();
4156 	/* Poison ptrs to descendents of REG, but not to REG itself,
4157 	   otherwise double-free detection doesn't work (since sm-state
4158 	   for "free" is stored on the original ptr svalue).  */
4159 	if (ptr_dst->descendent_of_p (m_reg)
4160 	    && ptr_dst != m_reg)
4161 	  {
4162 	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4163 							 sval->get_type ());
4164 	    ++m_count;
4165 	  }
4166       }
4167   }
4168 
4169   const region *m_reg;
4170   enum poison_kind m_pkind;
4171   region_model_manager *const m_mgr;
4172   int m_count;
4173 };
4174 
4175 /* Find any pointers to REG or its descendents; convert them to
4176    poisoned values of kind PKIND.
4177    Return the number of pointers that were poisoned.  */
4178 
4179 int
4180 region_model::poison_any_pointers_to_descendents (const region *reg,
4181 						   enum poison_kind pkind)
4182 {
4183   bad_pointer_finder bv (reg, pkind, m_mgr);
4184   m_store.for_each_binding (bv);
4185   return bv.m_count;
4186 }
4187 
4188 /* Attempt to merge THIS with OTHER_MODEL, writing the result
4189    to OUT_MODEL.  Use POINT to distinguish values created as a
4190    result of merging.  */
4191 
4192 bool
4193 region_model::can_merge_with_p (const region_model &other_model,
4194 				const program_point &point,
4195 				region_model *out_model,
4196 				const extrinsic_state *ext_state,
4197 				const program_state *state_a,
4198 				const program_state *state_b) const
4199 {
4200   gcc_assert (out_model);
4201   gcc_assert (m_mgr == other_model.m_mgr);
4202   gcc_assert (m_mgr == out_model->m_mgr);
4203 
4204   if (m_current_frame != other_model.m_current_frame)
4205     return false;
4206   out_model->m_current_frame = m_current_frame;
4207 
4208   model_merger m (this, &other_model, point, out_model,
4209 		  ext_state, state_a, state_b);
4210 
4211   if (!store::can_merge_p (&m_store, &other_model.m_store,
4212 			   &out_model->m_store, m_mgr->get_store_manager (),
4213 			   &m))
4214     return false;
4215 
4216   if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4217 					   &out_model->m_dynamic_extents))
4218     return false;
4219 
4220   /* Merge constraints.  */
4221   constraint_manager::merge (*m_constraints,
4222 			      *other_model.m_constraints,
4223 			      out_model->m_constraints);
4224 
4225   return true;
4226 }
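
/* A rough sketch of the merger semantics: where the two input models
   agree (say both bind "p" to the same heap allocation), the merged
   model keeps the shared binding; where they disagree (say x == 17
   versus x == 42), the merged store falls back to an unknown or
   widened value, and only constraints common to both sides survive in
   the merged constraint_manager.  */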
4227 
4228 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4229    otherwise.  */
4230 
4231 tree
4232 region_model::get_fndecl_for_call (const gcall *call,
4233 				   region_model_context *ctxt)
4234 {
4235   tree fn_ptr = gimple_call_fn (call);
4236   if (fn_ptr == NULL_TREE)
4237     return NULL_TREE;
4238   const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4239   if (const region_svalue *fn_ptr_ptr
4240 	= fn_ptr_sval->dyn_cast_region_svalue ())
4241     {
4242       const region *reg = fn_ptr_ptr->get_pointee ();
4243       if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4244 	{
4245 	  tree fn_decl = fn_reg->get_fndecl ();
4246 	  cgraph_node *node = cgraph_node::get (fn_decl);
4247 	  if (!node)
4248 	    return NULL_TREE;
4249 	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4250 	  if (ultimate_node)
4251 	    return ultimate_node->decl;
4252 	}
4253     }
4254 
4255   return NULL_TREE;
4256 }
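
/* For example, a direct call such as "free (p)" yields the fndecl for
   "free" (resolving through any aliases), whereas a call through a
   function pointer of unknown value finds no function_region and so
   yields NULL_TREE.  */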
4257 
4258 /* Would be much simpler to use a lambda here, if it were supported.  */
4259 
4260 struct append_regions_cb_data
4261 {
4262   const region_model *model;
4263   auto_vec<const decl_region *> *out;
4264 };
4265 
4266 /* Populate *OUT with all decl_regions in the current
4267    frame that have clusters within the store.  */
4268 
4269 void
4270 region_model::
4271 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4272 {
4273   append_regions_cb_data data;
4274   data.model = this;
4275   data.out = out;
4276   m_store.for_each_cluster (append_regions_cb, &data);
4277 }
4278 
4279 /* Implementation detail of get_regions_for_current_frame.  */
4280 
4281 void
4282 region_model::append_regions_cb (const region *base_reg,
4283 				 append_regions_cb_data *cb_data)
4284 {
4285   if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4286     return;
4287   if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4288     cb_data->out->safe_push (decl_reg);
4289 }
4290 
4291 /* Return a new region describing a heap-allocated block of memory.
4292    Use CTXT to complain about tainted sizes.  */
4293 
4294 const region *
4295 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
4296 					    region_model_context *ctxt)
4297 {
4298   const region *reg = m_mgr->create_region_for_heap_alloc ();
4299   if (compat_types_p (size_in_bytes->get_type (), size_type_node))
4300     set_dynamic_extents (reg, size_in_bytes, ctxt);
4301   return reg;
4302 }
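
/* For example (a hypothetical sketch), when handling a call such as:
     void *p = malloc (n);
   the handler can create the heap region with the svalue for "n" as
   its dynamic extent, so that later bounds checking can compare access
   offsets against that (possibly symbolic) size.  */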
4303 
4304 /* Return a new region describing a block of memory allocated within the
4305    current frame.
4306    Use CTXT to complain about tainted sizes.  */
4307 
4308 const region *
4309 region_model::create_region_for_alloca (const svalue *size_in_bytes,
4310 					region_model_context *ctxt)
4311 {
4312   const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
4313   if (compat_types_p (size_in_bytes->get_type (), size_type_node))
4314     set_dynamic_extents (reg, size_in_bytes, ctxt);
4315   return reg;
4316 }
4317 
4318 /* Record that the size of REG is SIZE_IN_BYTES.
4319    Use CTXT to complain about tainted sizes.  */
4320 
4321 void
4322 region_model::set_dynamic_extents (const region *reg,
4323 				   const svalue *size_in_bytes,
4324 				   region_model_context *ctxt)
4325 {
4326   assert_compat_types (size_in_bytes->get_type (), size_type_node);
4327   if (ctxt)
4328     check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
4329 				  ctxt);
4330   m_dynamic_extents.put (reg, size_in_bytes);
4331 }
4332 
4333 /* Get the recorded dynamic size of REG in bytes, or NULL if no size was
4334    recorded.  */
4335 
4336 const svalue *
4337 region_model::get_dynamic_extents (const region *reg) const
4338 {
4339   if (const svalue * const *slot = m_dynamic_extents.get (reg))
4340     return *slot;
4341   return NULL;
4342 }
4343 
4344 /* Unset any recorded dynamic size of REG.  */
4345 
4346 void
4347 region_model::unset_dynamic_extents (const region *reg)
4348 {
4349   m_dynamic_extents.remove (reg);
4350 }
4351 
4352 /* class noop_region_model_context : public region_model_context.  */
4353 
4354 void
4355 noop_region_model_context::add_note (pending_note *pn)
4356 {
4357   delete pn;
4358 }
4359 
4360 void
4361 noop_region_model_context::bifurcate (custom_edge_info *info)
4362 {
4363   delete info;
4364 }
4365 
4366 void
4367 noop_region_model_context::terminate_path ()
4368 {
4369 }
4370 
4371 /* struct model_merger.  */
4372 
4373 /* Dump a multiline representation of this merger to PP.  */
4374 
4375 void
4376 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
4377 {
4378   pp_string (pp, "model A:");
4379   pp_newline (pp);
4380   m_model_a->dump_to_pp (pp, simple, true);
4381   pp_newline (pp);
4382 
4383   pp_string (pp, "model B:");
4384   pp_newline (pp);
4385   m_model_b->dump_to_pp (pp, simple, true);
4386   pp_newline (pp);
4387 
4388   pp_string (pp, "merged model:");
4389   pp_newline (pp);
4390   m_merged_model->dump_to_pp (pp, simple, true);
4391   pp_newline (pp);
4392 }
4393 
4394 /* Dump a multiline representation of this merger to FILE.  */
4395 
4396 void
4397 model_merger::dump (FILE *fp, bool simple) const
4398 {
4399   pretty_printer pp;
4400   pp_format_decoder (&pp) = default_tree_printer;
4401   pp_show_color (&pp) = pp_show_color (global_dc->printer);
4402   pp.buffer->stream = fp;
4403   dump_to_pp (&pp, simple);
4404   pp_flush (&pp);
4405 }
4406 
4407 /* Dump a multiline representation of this merger to stderr.  */
4408 
4409 DEBUG_FUNCTION void
4410 model_merger::dump (bool simple) const
4411 {
4412   dump (stderr, simple);
4413 }
4414 
4415 /* Return true if it's OK to merge SVAL with other svalues.  */
4416 
4417 bool
4418 model_merger::mergeable_svalue_p (const svalue *sval) const
4419 {
4420   if (m_ext_state)
4421     {
4422       /* Reject merging svalues that have non-purgable sm-state,
4423 	 to avoid falsely reporting memory leaks by merging them
4424 	 with something else.  For example, given a local var "p",
4425 	 reject the merger of a:
4426 	   store_a mapping "p" to a malloc-ed ptr
4427 	 with:
4428 	   store_b mapping "p" to a NULL ptr.  */
4429       if (m_state_a)
4430 	if (!m_state_a->can_purge_p (*m_ext_state, sval))
4431 	  return false;
4432       if (m_state_b)
4433 	if (!m_state_b->can_purge_p (*m_ext_state, sval))
4434 	  return false;
4435     }
4436   return true;
4437 }
4438 
4439 } // namespace ana
4440 
4441 /* Dump RMODEL fully to stderr (i.e. without summarization).  */
4442 
4443 DEBUG_FUNCTION void
4444 debug (const region_model &rmodel)
4445 {
4446   rmodel.dump (false);
4447 }
4448 
4449 /* class rejected_op_constraint : public rejected_constraint.  */
4450 
4451 void
4452 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
4453 {
4454   region_model m (m_model);
4455   const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
4456   const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
4457   lhs_sval->dump_to_pp (pp, true);
4458   pp_printf (pp, " %s ", op_symbol_code (m_op));
4459   rhs_sval->dump_to_pp (pp, true);
4460 }
4461 
4462 /* class rejected_ranges_constraint : public rejected_constraint.  */
4463 
4464 void
4465 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
4466 {
4467   region_model m (m_model);
4468   const svalue *sval = m.get_rvalue (m_expr, NULL);
4469   sval->dump_to_pp (pp, true);
4470   pp_string (pp, " in ");
4471   m_ranges->dump_to_pp (pp, true);
4472 }
4473 
4474 /* class engine.  */
4475 
4476 /* engine's ctor.  */
4477 
4478 engine::engine (const supergraph *sg, logger *logger)
4479 : m_sg (sg), m_mgr (logger)
4480 {
4481 }
4482 
4483 /* Dump the managed objects by class to LOGGER, and the per-class totals.  */
4484 
4485 void
4486 engine::log_stats (logger *logger) const
4487 {
4488   m_mgr.log_stats (logger, true);
4489 }
4490 
4491 namespace ana {
4492 
4493 #if CHECKING_P
4494 
4495 namespace selftest {
4496 
4497 /* Build a constant tree of the given type from STR.  */
4498 
4499 static tree
4500 build_real_cst_from_string (tree type, const char *str)
4501 {
4502   REAL_VALUE_TYPE real;
4503   real_from_string (&real, str);
4504   return build_real (type, real);
4505 }
4506 
4507 /* Append various "interesting" constants to OUT (e.g. NaN).  */
4508 
4509 static void
4510 append_interesting_constants (auto_vec<tree> *out)
4511 {
4512   out->safe_push (build_int_cst (integer_type_node, 0));
4513   out->safe_push (build_int_cst (integer_type_node, 42));
4514   out->safe_push (build_int_cst (unsigned_type_node, 0));
4515   out->safe_push (build_int_cst (unsigned_type_node, 42));
4516   out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
4517   out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
4518   out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
4519   out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
4520   out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
4521   out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
4522   out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
4523   out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
4524 }
4525 
4526 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
4527    if the underlying constants aren't comparable.  */
4528 
4529 static void
4530 test_tree_cmp_on_constants ()
4531 {
4532   auto_vec<tree> csts;
4533   append_interesting_constants (&csts);
4534 
4535   /* Try sorting every triple.  */
4536   const unsigned num = csts.length ();
4537   for (unsigned i = 0; i < num; i++)
4538     for (unsigned j = 0; j < num; j++)
4539       for (unsigned k = 0; k < num; k++)
4540 	{
4541 	  auto_vec<tree> v (3);
4542 	  v.quick_push (csts[i]);
4543 	  v.quick_push (csts[j]);
4544 	  v.quick_push (csts[k]);
4545 	  v.qsort (tree_cmp);
4546 	}
4547 }
4548 
4549 /* Implementation detail of the ASSERT_CONDITION_* macros.  */
4550 
4551 void
4552 assert_condition (const location &loc,
4553 		  region_model &model,
4554 		  const svalue *lhs, tree_code op, const svalue *rhs,
4555 		  tristate expected)
4556 {
4557   tristate actual = model.eval_condition (lhs, op, rhs);
4558   ASSERT_EQ_AT (loc, actual, expected);
4559 }
4560 
4561 /* Implementation detail of the ASSERT_CONDITION_* macros.  */
4562 
4563 void
4564 assert_condition (const location &loc,
4565 		  region_model &model,
4566 		  tree lhs, tree_code op, tree rhs,
4567 		  tristate expected)
4568 {
4569   tristate actual = model.eval_condition (lhs, op, rhs, NULL);
4570   ASSERT_EQ_AT (loc, actual, expected);
4571 }
4572 
4573 /* Implementation detail of ASSERT_DUMP_TREE_EQ.  */
4574 
4575 static void
4576 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
4577 {
4578   auto_fix_quotes sentinel;
4579   pretty_printer pp;
4580   pp_format_decoder (&pp) = default_tree_printer;
4581   dump_tree (&pp, t);
4582   ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
4583 }
4584 
4585 /* Assert that dump_tree (T) is EXPECTED.  */
4586 
4587 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
4588   SELFTEST_BEGIN_STMT							\
4589   assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
4590   SELFTEST_END_STMT
4591 
4592 /* Implementation detail of ASSERT_DUMP_EQ.  */
4593 
4594 static void
4595 assert_dump_eq (const location &loc,
4596 		const region_model &model,
4597 		bool summarize,
4598 		const char *expected)
4599 {
4600   auto_fix_quotes sentinel;
4601   pretty_printer pp;
4602   pp_format_decoder (&pp) = default_tree_printer;
4603 
4604   model.dump_to_pp (&pp, summarize, true);
4605   ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
4606 }
4607 
4608 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */
4609 
4610 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
4611   SELFTEST_BEGIN_STMT							\
4612   assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
4613   SELFTEST_END_STMT
4614 
4615 /* Smoketest for region_model::dump_to_pp.  */
4616 
4617 static void
4618 test_dump ()
4619 {
4620   region_model_manager mgr;
4621   region_model model (&mgr);
4622 
4623   ASSERT_DUMP_EQ (model, false,
4624 		  "stack depth: 0\n"
4625 		  "m_called_unknown_fn: FALSE\n"
4626 		  "constraint_manager:\n"
4627 		  "  equiv classes:\n"
4628 		  "  constraints:\n");
4629   ASSERT_DUMP_EQ (model, true,
4630 		  "stack depth: 0\n"
4631 		  "m_called_unknown_fn: FALSE\n"
4632 		  "constraint_manager:\n"
4633 		  "  equiv classes:\n"
4634 		  "  constraints:\n");
4635 }
4636 
4637 /* Helper function for selftests.  Create a struct or union type named NAME,
4638    with the fields given by the FIELD_DECLS in FIELDS.
4639    If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
4640    create a UNION_TYPE.  */
4641 
4642 static tree
4643 make_test_compound_type (const char *name, bool is_struct,
4644 			 const auto_vec<tree> *fields)
4645 {
4646   tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
4647   TYPE_NAME (t) = get_identifier (name);
4648   TYPE_SIZE (t) = 0;
4649 
4650   tree fieldlist = NULL;
4651   int i;
4652   tree field;
4653   FOR_EACH_VEC_ELT (*fields, i, field)
4654     {
4655       gcc_assert (TREE_CODE (field) == FIELD_DECL);
4656       DECL_CONTEXT (field) = t;
4657       fieldlist = chainon (field, fieldlist);
4658     }
4659   fieldlist = nreverse (fieldlist);
4660   TYPE_FIELDS (t) = fieldlist;
4661 
4662   layout_type (t);
4663   return t;
4664 }
4665 
4666 /* Selftest fixture for creating the type "struct coord {int x; int y; };".  */
4667 
4668 struct coord_test
4669 {
4670   coord_test ()
4671   {
4672     auto_vec<tree> fields;
4673     m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4674 			       get_identifier ("x"), integer_type_node);
4675     fields.safe_push (m_x_field);
4676     m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4677 			       get_identifier ("y"), integer_type_node);
4678     fields.safe_push (m_y_field);
4679     m_coord_type = make_test_compound_type ("coord", true, &fields);
4680   }
4681 
4682   tree m_x_field;
4683   tree m_y_field;
4684   tree m_coord_type;
4685 };
4686 
4687 /* Verify usage of a struct.  */
4688 
4689 static void
4690 test_struct ()
4691 {
4692   coord_test ct;
4693 
4694   tree c = build_global_decl ("c", ct.m_coord_type);
4695   tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4696 		     c, ct.m_x_field, NULL_TREE);
4697   tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4698 		     c, ct.m_y_field, NULL_TREE);
4699 
4700   tree int_17 = build_int_cst (integer_type_node, 17);
4701   tree int_m3 = build_int_cst (integer_type_node, -3);
4702 
4703   region_model_manager mgr;
4704   region_model model (&mgr);
4705   model.set_value (c_x, int_17, NULL);
4706   model.set_value (c_y, int_m3, NULL);
4707 
4708   /* Verify get_offset for "c.x".  */
4709   {
4710     const region *c_x_reg = model.get_lvalue (c_x, NULL);
4711     region_offset offset = c_x_reg->get_offset ();
4712     ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4713     ASSERT_EQ (offset.get_bit_offset (), 0);
4714   }
4715 
4716   /* Verify get_offset for "c.y".  */
4717   {
4718     const region *c_y_reg = model.get_lvalue (c_y, NULL);
4719     region_offset offset = c_y_reg->get_offset ();
4720     ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4721     ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4722   }
4723 }
4724 
4725 /* Verify usage of an array element.  */
4726 
4727 static void
4728 test_array_1 ()
4729 {
4730   tree tlen = size_int (10);
4731   tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4732 
4733   tree a = build_global_decl ("a", arr_type);
4734 
4735   region_model_manager mgr;
4736   region_model model (&mgr);
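  /* "a[0] = 'A';"  */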
4737   tree int_0 = build_int_cst (integer_type_node, 0);
4738   tree a_0 = build4 (ARRAY_REF, char_type_node,
4739 		     a, int_0, NULL_TREE, NULL_TREE);
4740   tree char_A = build_int_cst (char_type_node, 'A');
4741   model.set_value (a_0, char_A, NULL);
4742 }
4743 
4744 /* Verify that region_model::get_representative_tree works as expected.  */
4745 
4746 static void
4747 test_get_representative_tree ()
4748 {
4749   region_model_manager mgr;
4750 
4751   /* STRING_CST.  */
4752   {
4753     tree string_cst = build_string (4, "foo");
4754     region_model m (&mgr);
4755     const svalue *str_sval = m.get_rvalue (string_cst, NULL);
4756     tree rep = m.get_representative_tree (str_sval);
4757     ASSERT_EQ (rep, string_cst);
4758   }
4759 
4760   /* String literal.  */
4761   {
4762     tree string_cst_ptr = build_string_literal (4, "foo");
4763     region_model m (&mgr);
4764     const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
4765     tree rep = m.get_representative_tree (str_sval);
4766     ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
4767   }
4768 
4769   /* Value of an element within an array.  */
4770   {
4771     tree tlen = size_int (10);
4772     tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4773     tree a = build_global_decl ("a", arr_type);
4774     placeholder_svalue test_sval (char_type_node, "test value");
4775 
4776     /* Value of a[3].  */
4777     {
4778       test_region_model_context ctxt;
4779       region_model model (&mgr);
4780       tree int_3 = build_int_cst (integer_type_node, 3);
4781       tree a_3 = build4 (ARRAY_REF, char_type_node,
4782 			 a, int_3, NULL_TREE, NULL_TREE);
4783       const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
4784       model.set_value (a_3_reg, &test_sval, &ctxt);
4785       tree rep = model.get_representative_tree (&test_sval);
4786       ASSERT_DUMP_TREE_EQ (rep, "a[3]");
4787     }
4788 
4789     /* Value of a[0].  */
4790     {
4791       test_region_model_context ctxt;
4792       region_model model (&mgr);
4793       tree idx = build_int_cst (integer_type_node, 0);
4794       tree a_0 = build4 (ARRAY_REF, char_type_node,
4795 			 a, idx, NULL_TREE, NULL_TREE);
4796       const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
4797       model.set_value (a_0_reg, &test_sval, &ctxt);
4798       tree rep = model.get_representative_tree (&test_sval);
4799       ASSERT_DUMP_TREE_EQ (rep, "a[0]");
4800     }
4801   }
4802 
4803   /* Value of a field within a struct.  */
4804   {
4805     coord_test ct;
4806 
4807     tree c = build_global_decl ("c", ct.m_coord_type);
4808     tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4809 		       c, ct.m_x_field, NULL_TREE);
4810     tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4811 		       c, ct.m_y_field, NULL_TREE);
4812 
4813     test_region_model_context ctxt;
4814 
4815     /* Value of initial field.  */
4816     {
4817       region_model m (&mgr);
4818       const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
4819       placeholder_svalue test_sval_x (integer_type_node, "test x val");
4820       m.set_value (c_x_reg, &test_sval_x, &ctxt);
4821       tree rep = m.get_representative_tree (&test_sval_x);
4822       ASSERT_DUMP_TREE_EQ (rep, "c.x");
4823     }
4824 
4825     /* Value of non-initial field.  */
4826     {
4827       region_model m (&mgr);
4828       const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
4829       placeholder_svalue test_sval_y (integer_type_node, "test y val");
4830       m.set_value (c_y_reg, &test_sval_y, &ctxt);
4831       tree rep = m.get_representative_tree (&test_sval_y);
4832       ASSERT_DUMP_TREE_EQ (rep, "c.y");
4833     }
4834   }
4835 }
4836 
4837 /* Verify that calling region_model::get_rvalue repeatedly on the same
4838    tree constant retrieves the same svalue *.  */
4839 
4840 static void
4841 test_unique_constants ()
4842 {
4843   tree int_0 = build_int_cst (integer_type_node, 0);
4844   tree int_42 = build_int_cst (integer_type_node, 42);
4845 
4846   test_region_model_context ctxt;
4847   region_model_manager mgr;
4848   region_model model (&mgr);
4849   ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
4850   ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
4851 	     model.get_rvalue (int_42, &ctxt));
4852   ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
4853   ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
4854 
4855   /* A "(const int)42" will be a different tree from "(int)42)"...  */
4856   tree const_int_type_node
4857     = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
4858   tree const_int_42 = build_int_cst (const_int_type_node, 42);
4859   ASSERT_NE (int_42, const_int_42);
4860   /* It should have a different const_svalue.  */
4861   const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
4862   const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
4863   ASSERT_NE (int_42_sval, const_int_42_sval);
4864   /* But they should compare as equal.  */
4865   ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
4866   ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
4867 }
4868 
4869 /* Verify that each type gets its own singleton unknown_svalue within a
4870    region_model_manager, and that NULL_TREE gets its own singleton.  */
4871 
4872 static void
4873 test_unique_unknowns ()
4874 {
4875   region_model_manager mgr;
4876   const svalue *unknown_int
4877     = mgr.get_or_create_unknown_svalue (integer_type_node);
4878   /* Repeated calls with the same type should get the same "unknown"
4879      svalue.  */
4880   const svalue *unknown_int_2
4881     = mgr.get_or_create_unknown_svalue (integer_type_node);
4882   ASSERT_EQ (unknown_int, unknown_int_2);
4883 
4884   /* Different types (or the NULL type) should have different
4885      unknown_svalues.  */
4886   const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
4887   ASSERT_NE (unknown_NULL_type, unknown_int);
4888 
4889   /* Repeated calls with NULL for the type should get the same "unknown"
4890      svalue.  */
4891   const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
4892   ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
4893 }
4894 
4895 /* Verify that initial_svalues are handled as expected.  */
4896 
4897 static void
4898 test_initial_svalue_folding ()
4899 {
4900   region_model_manager mgr;
4901   tree x = build_global_decl ("x", integer_type_node);
4902   tree y = build_global_decl ("y", integer_type_node);
4903 
4904   test_region_model_context ctxt;
4905   region_model model (&mgr);
4906   const svalue *x_init = model.get_rvalue (x, &ctxt);
4907   const svalue *y_init = model.get_rvalue (y, &ctxt);
4908   ASSERT_NE (x_init, y_init);
4909   const region *x_reg = model.get_lvalue (x, &ctxt);
4910   ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4912 }
4913 
4914 /* Verify that unary ops are folded as expected.  */
4915 
4916 static void
4917 test_unaryop_svalue_folding ()
4918 {
4919   region_model_manager mgr;
4920   tree x = build_global_decl ("x", integer_type_node);
4921   tree y = build_global_decl ("y", integer_type_node);
4922 
4923   test_region_model_context ctxt;
4924   region_model model (&mgr);
4925   const svalue *x_init = model.get_rvalue (x, &ctxt);
4926   const svalue *y_init = model.get_rvalue (y, &ctxt);
4927   const region *x_reg = model.get_lvalue (x, &ctxt);
4928   ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4929 
4930   /* "(int)x" -> "x".  */
4931   ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
4932 
4933   /* "(void *)x" -> something other than "x".  */
4934   ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
4935 
4936   /* "!(x == y)" -> "x != y".  */
4937   ASSERT_EQ (mgr.get_or_create_unaryop
4938 	       (boolean_type_node, TRUTH_NOT_EXPR,
4939 		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
4940 					 x_init, y_init)),
4941 	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
4942 				      x_init, y_init));
4943   /* "!(x > y)" -> "x <= y".  */
4944   ASSERT_EQ (mgr.get_or_create_unaryop
4945 	       (boolean_type_node, TRUTH_NOT_EXPR,
4946 		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
4947 					 x_init, y_init)),
4948 	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
4949 				      x_init, y_init));
4950 }
4951 
4952 /* Verify that binops on constant svalues are folded.  */
4953 
4954 static void
4955 test_binop_svalue_folding ()
4956 {
4957 #define NUM_CSTS 10
4958   tree cst_int[NUM_CSTS];
4959   region_model_manager mgr;
4960   const svalue *cst_sval[NUM_CSTS];
4961   for (int i = 0; i < NUM_CSTS; i++)
4962     {
4963       cst_int[i] = build_int_cst (integer_type_node, i);
4964       cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4965       ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4966       ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4967     }
4968 
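  /* Verify that binops on pairs of these constants fold to the expected
     constants: e.g. 2 + 3 folds to cst_sval[5], 2 * 3 to cst_sval[6],
     and the comparisons fold to the boolean-like constants 0 and 1.  */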
4969   for (int i = 0; i < NUM_CSTS; i++)
4970     for (int j = 0; j < NUM_CSTS; j++)
4971       {
4972 	if (i != j)
4973 	  ASSERT_NE (cst_sval[i], cst_sval[j]);
4974 	if (i + j < NUM_CSTS)
4975 	  {
4976 	    const svalue *sum
4977 	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4978 					 cst_sval[i], cst_sval[j]);
4979 	    ASSERT_EQ (sum, cst_sval[i + j]);
4980 	  }
4981 	if (i - j >= 0)
4982 	  {
4983 	    const svalue *difference
4984 	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4985 					 cst_sval[i], cst_sval[j]);
4986 	    ASSERT_EQ (difference, cst_sval[i - j]);
4987 	  }
4988 	if (i * j < NUM_CSTS)
4989 	  {
4990 	    const svalue *product
4991 	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4992 					 cst_sval[i], cst_sval[j]);
4993 	    ASSERT_EQ (product, cst_sval[i * j]);
4994 	  }
4995 	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4996 						    cst_sval[i], cst_sval[j]);
4997 	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
4998 	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4999 						     cst_sval[i], cst_sval[j]);
5000 	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
5001 	// etc
5002       }
5003 
5004   tree x = build_global_decl ("x", integer_type_node);
5005 
5006   test_region_model_context ctxt;
5007   region_model model (&mgr);
5008   const svalue *x_init = model.get_rvalue (x, &ctxt);
5009 
5010   /* PLUS_EXPR folding.  */
5011   const svalue *x_init_plus_zero
5012     = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
5013 			       x_init, cst_sval[0]);
5014   ASSERT_EQ (x_init_plus_zero, x_init);
5015   const svalue *zero_plus_x_init
5016     = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
5017 			       cst_sval[0], x_init);
5018   ASSERT_EQ (zero_plus_x_init, x_init);
5019 
5020   /* MULT_EXPR folding.  */
5021   const svalue *x_init_times_zero
5022     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5023 			       x_init, cst_sval[0]);
5024   ASSERT_EQ (x_init_times_zero, cst_sval[0]);
5025   const svalue *zero_times_x_init
5026     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5027 			       cst_sval[0], x_init);
5028   ASSERT_EQ (zero_times_x_init, cst_sval[0]);
5029 
5030   const svalue *x_init_times_one
5031     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5032 			       x_init, cst_sval[1]);
5033   ASSERT_EQ (x_init_times_one, x_init);
5034   const svalue *one_times_x_init
5035     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5036 			       cst_sval[1], x_init);
5037   ASSERT_EQ (one_times_x_init, x_init);
5038 
5039   // etc
5040   // TODO: do we want to use the match-and-simplify DSL for this?
5041 
5042   /* Verify that binops put any constants on the RHS.  */
5043   const svalue *four_times_x_init
5044     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5045 			       cst_sval[4], x_init);
5046   const svalue *x_init_times_four
5047     = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
5048 			       x_init, cst_sval[4]);
5049   ASSERT_EQ (four_times_x_init, x_init_times_four);
5050   const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
5051   ASSERT_EQ (binop->get_op (), MULT_EXPR);
5052   ASSERT_EQ (binop->get_arg0 (), x_init);
5053   ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
5054 
5055   /* Verify that ((x + 1) + 1) == (x + 2).  */
5056   const svalue *x_init_plus_one
5057     = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
5058 			       x_init, cst_sval[1]);
5059   const svalue *x_init_plus_two
5060     = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
5061 			       x_init, cst_sval[2]);
5062   const svalue *x_init_plus_one_plus_one
5063     = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
5064 			       x_init_plus_one, cst_sval[1]);
5065   ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
5066 
5067   /* Verify various binops on booleans.  */
5068   {
5069     const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
5070     const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
5071     const svalue *sval_unknown
5072       = mgr.get_or_create_unknown_svalue (boolean_type_node);
5073     const placeholder_svalue sval_placeholder (boolean_type_node, "v");
5074     for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
5075       {
5076 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5077 					    sval_true, sval_unknown),
5078 		   sval_true);
5079 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5080 					    sval_false, sval_unknown),
5081 		   sval_unknown);
5082 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5083 					    sval_false, &sval_placeholder),
5084 		   &sval_placeholder);
5085       }
5086     for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
5087       {
5088 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5089 					    sval_false, sval_unknown),
5090 		   sval_false);
5091 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5092 					    sval_true, sval_unknown),
5093 		   sval_unknown);
5094 	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
5095 					    sval_true, &sval_placeholder),
5096 		   &sval_placeholder);
5097       }
5098   }
5099 }
5100 
5101 /* Verify that sub_svalues are folded as expected.  */
5102 
5103 static void
5104 test_sub_svalue_folding ()
5105 {
5106   coord_test ct;
5107   tree c = build_global_decl ("c", ct.m_coord_type);
5108   tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5109 		     c, ct.m_x_field, NULL_TREE);
5110 
5111   region_model_manager mgr;
5112   region_model model (&mgr);
5113   test_region_model_context ctxt;
5114   const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
5115 
5116   /* Verify that sub_svalue of "unknown" simply
5117      yields an unknown.  */
5118 
5119   const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
5120   const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
5121 						      unknown, c_x_reg);
5122   ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
5123   ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
5124 }
5125 
5126 /* Test that region::descendent_of_p works as expected.  */
5127 
5128 static void
5129 test_descendent_of_p ()
5130 {
5131   region_model_manager mgr;
5132   const region *stack = mgr.get_stack_region ();
5133   const region *heap = mgr.get_heap_region ();
5134   const region *code = mgr.get_code_region ();
5135   const region *globals = mgr.get_globals_region ();
5136 
5137   /* descendent_of_p should return true when used on the region itself.  */
5138   ASSERT_TRUE (stack->descendent_of_p (stack));
5139   ASSERT_FALSE (stack->descendent_of_p (heap));
5140   ASSERT_FALSE (stack->descendent_of_p (code));
5141   ASSERT_FALSE (stack->descendent_of_p (globals));
5142 
5143   tree x = build_global_decl ("x", integer_type_node);
5144   const region *x_reg = mgr.get_region_for_global (x);
5145   ASSERT_TRUE (x_reg->descendent_of_p (globals));
5146 
5147   /* A cast_region should be a descendent of the original region.  */
5148   const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
5149   ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
5150 }
5151 
5152 /* Verify that bit_range_region works as expected.  */
5153 
5154 static void
5155 test_bit_range_regions ()
5156 {
5157   tree x = build_global_decl ("x", integer_type_node);
5158   region_model_manager mgr;
5159   const region *x_reg = mgr.get_region_for_global (x);
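  /* Carve out the first and second bytes of "x" as char-typed
     bit_range_regions.  */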
5160   const region *byte0
5161     = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
5162   const region *byte1
5163     = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
5164   ASSERT_TRUE (byte0->descendent_of_p (x_reg));
5165   ASSERT_TRUE (byte1->descendent_of_p (x_reg));
5166   ASSERT_NE (byte0, byte1);
5167 }
5168 
5169 /* Verify that simple assignments work as expected.  */
5170 
5171 static void
5172 test_assignment ()
5173 {
5174   tree int_0 = build_int_cst (integer_type_node, 0);
5175   tree x = build_global_decl ("x", integer_type_node);
5176   tree y = build_global_decl ("y", integer_type_node);
5177 
5178   /* "x == 0", then use of y, then "y = 0;".  */
5179   region_model_manager mgr;
5180   region_model model (&mgr);
5181   ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
5182   ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
5183   model.set_value (model.get_lvalue (y, NULL),
5184 		   model.get_rvalue (int_0, NULL),
5185 		   NULL);
5186   ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
5187   ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
5188 }
5189 
5190 /* Verify that compound assignments work as expected.  */
5191 
5192 static void
5193 test_compound_assignment ()
5194 {
5195   coord_test ct;
5196 
5197   tree c = build_global_decl ("c", ct.m_coord_type);
5198   tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5199 		     c, ct.m_x_field, NULL_TREE);
5200   tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5201 		     c, ct.m_y_field, NULL_TREE);
5202   tree d = build_global_decl ("d", ct.m_coord_type);
5203   tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5204 		     d, ct.m_x_field, NULL_TREE);
5205   tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5206 		     d, ct.m_y_field, NULL_TREE);
5207 
5208   tree int_17 = build_int_cst (integer_type_node, 17);
5209   tree int_m3 = build_int_cst (integer_type_node, -3);
5210 
5211   region_model_manager mgr;
5212   region_model model (&mgr);
5213   model.set_value (c_x, int_17, NULL);
5214   model.set_value (c_y, int_m3, NULL);
5215 
5216   /* Copy c to d.  */
5217   const svalue *sval = model.get_rvalue (c, NULL);
5218   model.set_value (model.get_lvalue (d, NULL), sval, NULL);
5219 
5220   /* Check that the fields have the same svalues.  */
5221   ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
5222   ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
5223 }
5224 
5225 /* Verify the details of pushing and popping stack frames.  */
5226 
5227 static void
5228 test_stack_frames ()
5229 {
5230   tree int_42 = build_int_cst (integer_type_node, 42);
5231   tree int_10 = build_int_cst (integer_type_node, 10);
5232   tree int_5 = build_int_cst (integer_type_node, 5);
5233   tree int_0 = build_int_cst (integer_type_node, 0);
5234 
5235   auto_vec <tree> param_types;
5236   tree parent_fndecl = make_fndecl (integer_type_node,
5237 				    "parent_fn",
5238 				    param_types);
5239   allocate_struct_function (parent_fndecl, true);
5240 
5241   tree child_fndecl = make_fndecl (integer_type_node,
5242 				   "child_fn",
5243 				   param_types);
5244   allocate_struct_function (child_fndecl, true);
5245 
5246   /* "a" and "b" in the parent frame.  */
5247   tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5248 		       get_identifier ("a"),
5249 		       integer_type_node);
5250   DECL_CONTEXT (a) = parent_fndecl;
5251   tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5252 		       get_identifier ("b"),
5253 		       integer_type_node);
5254   DECL_CONTEXT (b) = parent_fndecl;
5255   /* "x" and "y" in a child frame.  */
5256   tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5257 		       get_identifier ("x"),
5258 		       integer_type_node);
5259   DECL_CONTEXT (x) = child_fndecl;
5260   tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5261 		       get_identifier ("y"),
5262 		       integer_type_node);
5263   DECL_CONTEXT (y) = child_fndecl;
5264 
5265   /* "p" global.  */
5266   tree p = build_global_decl ("p", ptr_type_node);
5267 
5268   /* "q" global.  */
5269   tree q = build_global_decl ("q", ptr_type_node);
5270 
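  /* The state built up below roughly corresponds to:
       void *p, *q;               (globals)
       parent_fn (int a, int b)   (outer frame)
       child_fn (int x, int y)    (inner frame)
     with "p = &x;" and "q = &p;".  */
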
5271   region_model_manager mgr;
5272   test_region_model_context ctxt;
5273   region_model model (&mgr);
5274 
5275   /* Push stack frame for "parent_fn".  */
5276   const region *parent_frame_reg
5277     = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
5278 			NULL, &ctxt);
5279   ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
5280   ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
5281   const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
5282   model.set_value (a_in_parent_reg,
5283 		   model.get_rvalue (int_42, &ctxt),
5284 		   &ctxt);
5285   ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
5286 
5287   model.add_constraint (b, LT_EXPR, int_10, &ctxt);
5288   ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5289 	     tristate (tristate::TS_TRUE));
5290 
5291   /* Push stack frame for "child_fn".  */
5292   const region *child_frame_reg
5293     = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
5294   ASSERT_EQ (model.get_current_frame (), child_frame_reg);
5295   ASSERT_TRUE (model.region_exists_p (child_frame_reg));
5296   const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
5297   model.set_value (x_in_child_reg,
5298 		   model.get_rvalue (int_0, &ctxt),
5299 		   &ctxt);
5300   ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
5301 
5302   model.add_constraint (y, NE_EXPR, int_5, &ctxt);
5303   ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
5304 	     tristate (tristate::TS_TRUE));
5305 
5306   /* Point a global pointer at a local in the child frame:  p = &x.  */
5307   const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
5308   model.set_value (p_in_globals_reg,
5309 		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
5310 		   &ctxt);
5311   ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
5312 
5313   /* Point another global pointer at p: q = &p.  */
5314   const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
5315   model.set_value (q_in_globals_reg,
5316 		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
5317 		   &ctxt);
5318 
5319   /* Test region::descendent_of_p.  */
5320   ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
5321   ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
5322   ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
5323 
5324   /* Pop the "child_fn" frame from the stack.  */
5325   model.pop_frame (NULL, NULL, &ctxt);
5326   ASSERT_FALSE (model.region_exists_p (child_frame_reg));
5327   ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
5328 
5329   /* Verify that p (which was pointing at the local "x" in the popped
5330      frame) has been poisoned.  */
5331   const svalue *new_p_sval = model.get_rvalue (p, NULL);
5332   ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
5333   ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
5334 	     POISON_KIND_POPPED_STACK);
5335 
5336   /* Verify that q still points to p, in spite of the region
5337      renumbering.  */
5338   const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
5339   ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5340   ASSERT_EQ (new_q_sval->maybe_get_region (),
5341 	     model.get_lvalue (p, &ctxt));
5342 
5343   /* Verify that top of stack has been updated.  */
5344   ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
5345 
5346   /* Verify locals in parent frame.  */
5347   /* Verify "a" still has its value.  */
5348   const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
5349   ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
5350   ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
5351 	     int_42);
5352   /* Verify "b" still has its constraint.  */
5353   ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5354 	     tristate (tristate::TS_TRUE));
5355 }
5356 
5357 /* Verify that get_representative_path_var works as expected, that
5358    we can map from regions to parms and back within a recursive call
5359    stack.  */
5360 
5361 static void
5362 test_get_representative_path_var ()
5363 {
5364   auto_vec <tree> param_types;
5365   tree fndecl = make_fndecl (integer_type_node,
5366 			     "factorial",
5367 			     param_types);
5368   allocate_struct_function (fndecl, true);
5369 
5370   /* Parm "n".  */
5371   tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5372 		       get_identifier ("n"),
5373 		       integer_type_node);
5374   DECL_CONTEXT (n) = fndecl;
5375 
5376   region_model_manager mgr;
5377   test_region_model_context ctxt;
5378   region_model model (&mgr);
5379 
5380   /* Push 5 stack frames for "factorial", each with a param.  */
5381   auto_vec<const region *> parm_regs;
5382   auto_vec<const svalue *> parm_svals;
5383   for (int depth = 0; depth < 5; depth++)
5384     {
5385       const region *frame_n_reg
5386 	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
5387       const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
5388       parm_regs.safe_push (parm_n_reg);
5389 
5390       ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
5391       const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
5392       parm_svals.safe_push (sval_n);
5393     }
5394 
5395   /* Verify that we can recognize that the regions are the parms,
5396      at every depth.  */
5397   for (int depth = 0; depth < 5; depth++)
5398     {
5399       {
5400 	svalue_set visited;
5401 	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
5402 						      &visited),
5403 		   path_var (n, depth + 1));
5404       }
5405       /* ...and that we can lookup lvalues for locals for all frames,
5406 	 not just the top.  */
5407       ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
5408 		 parm_regs[depth]);
5409       /* ...and that we can locate the svalues.  */
5410       {
5411 	svalue_set visited;
5412 	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
5413 						      &visited),
5414 		   path_var (n, depth + 1));
5415       }
5416     }
5417 }
5418 
5419 /* Ensure that region_model::operator== works as expected.  */
5420 
5421 static void
5422 test_equality_1 ()
5423 {
5424   tree int_42 = build_int_cst (integer_type_node, 42);
5425   tree int_17 = build_int_cst (integer_type_node, 17);
5426 
5427 /* Verify that "empty" region_model instances are equal to each other.  */
5428   region_model_manager mgr;
5429   region_model model0 (&mgr);
5430   region_model model1 (&mgr);
5431   ASSERT_EQ (model0, model1);
5432 
5433   /* Verify that setting state in model0 makes the models non-equal.  */
5434   tree x = build_global_decl ("x", integer_type_node);
5435   model0.set_value (x, int_42, NULL);
5436   ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5437   ASSERT_NE (model0, model1);
5438 
5439   /* Verify the copy-ctor.  */
5440   region_model model2 (model0);
5441   ASSERT_EQ (model0, model2);
5442   ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5443   ASSERT_NE (model1, model2);
5444 
5445   /* Verify that models obtained from copy-ctor are independently editable
5446      w/o affecting the original model.  */
5447   model2.set_value (x, int_17, NULL);
5448   ASSERT_NE (model0, model2);
5449   ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
5450   ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5451 }
5452 
5453 /* Verify that region models for
5454       x = 42; y = 113;
5455    and
5456       y = 113; x = 42;
5457    are equal.  */
5458 
5459 static void
5460 test_canonicalization_2 ()
5461 {
5462   tree int_42 = build_int_cst (integer_type_node, 42);
5463   tree int_113 = build_int_cst (integer_type_node, 113);
5464   tree x = build_global_decl ("x", integer_type_node);
5465   tree y = build_global_decl ("y", integer_type_node);
5466 
5467   region_model_manager mgr;
5468   region_model model0 (&mgr);
5469   model0.set_value (model0.get_lvalue (x, NULL),
5470 		    model0.get_rvalue (int_42, NULL),
5471 		    NULL);
5472   model0.set_value (model0.get_lvalue (y, NULL),
5473 		    model0.get_rvalue (int_113, NULL),
5474 		    NULL);
5475 
5476   region_model model1 (&mgr);
5477   model1.set_value (model1.get_lvalue (y, NULL),
5478 		    model1.get_rvalue (int_113, NULL),
5479 		    NULL);
5480   model1.set_value (model1.get_lvalue (x, NULL),
5481 		    model1.get_rvalue (int_42, NULL),
5482 		    NULL);
5483 
5484   ASSERT_EQ (model0, model1);
5485 }
5486 
5487 /* Verify that constraints for
5488      x > 3 && y > 42
5489    and
5490      y > 42 && x > 3
5491    are equal after canonicalization.  */
5492 
5493 static void
5494 test_canonicalization_3 ()
5495 {
5496   tree int_3 = build_int_cst (integer_type_node, 3);
5497   tree int_42 = build_int_cst (integer_type_node, 42);
5498   tree x = build_global_decl ("x", integer_type_node);
5499   tree y = build_global_decl ("y", integer_type_node);
5500 
5501   region_model_manager mgr;
5502   region_model model0 (&mgr);
5503   model0.add_constraint (x, GT_EXPR, int_3, NULL);
5504   model0.add_constraint (y, GT_EXPR, int_42, NULL);
5505 
5506   region_model model1 (&mgr);
5507   model1.add_constraint (y, GT_EXPR, int_42, NULL);
5508   model1.add_constraint (x, GT_EXPR, int_3, NULL);
5509 
5510   model0.canonicalize ();
5511   model1.canonicalize ();
5512   ASSERT_EQ (model0, model1);
5513 }
5514 
5515 /* Verify that we can canonicalize a model containing NaN and other real
5516    constants.  */
5517 
5518 static void
5519 test_canonicalization_4 ()
5520 {
5521   auto_vec<tree> csts;
5522   append_interesting_constants (&csts);
5523 
5524   region_model_manager mgr;
5525   region_model model (&mgr);
5526 
5527   for (tree cst : csts)
5528     model.get_rvalue (cst, NULL);
5529 
5530   model.canonicalize ();
5531 }
5532 
5533 /* Assert that if we have two region_model instances
5534    with values VAL_A and VAL_B for EXPR that they are
5535    mergeable.  Write the merged model to *OUT_MERGED_MODEL,
5536    and the merged svalue ptr to *OUT_MERGED_SVALUE.
5537    If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
5538    for that region_model.  */
5539 
5540 static void
5541 assert_region_models_merge (tree expr, tree val_a, tree val_b,
5542 			     region_model *out_merged_model,
5543 			     const svalue **out_merged_svalue)
5544 {
5545   program_point point (program_point::origin ());
5546   test_region_model_context ctxt;
5547   region_model_manager *mgr = out_merged_model->get_manager ();
5548   region_model model0 (mgr);
5549   region_model model1 (mgr);
5550   if (val_a)
5551     model0.set_value (model0.get_lvalue (expr, &ctxt),
5552 		      model0.get_rvalue (val_a, &ctxt),
5553 		      &ctxt);
5554   if (val_b)
5555     model1.set_value (model1.get_lvalue (expr, &ctxt),
5556 		      model1.get_rvalue (val_b, &ctxt),
5557 		      &ctxt);
5558 
5559   /* They should be mergeable.  */
5560   ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
5561   *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
5562 }
5563 
5564 /* Verify that we can merge region_model instances.  */
5565 
5566 static void
5567 test_state_merging ()
5568 {
5569   tree int_42 = build_int_cst (integer_type_node, 42);
5570   tree int_113 = build_int_cst (integer_type_node, 113);
5571   tree x = build_global_decl ("x", integer_type_node);
5572   tree y = build_global_decl ("y", integer_type_node);
5573   tree z = build_global_decl ("z", integer_type_node);
5574   tree p = build_global_decl ("p", ptr_type_node);
5575 
5576   tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
5577   tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
5578 
5579   auto_vec <tree> param_types;
5580   tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
5581   allocate_struct_function (test_fndecl, true);
5582 
5583   /* Param "a".  */
5584   tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5585 		       get_identifier ("a"),
5586 		       integer_type_node);
5587   DECL_CONTEXT (a) = test_fndecl;
5588   tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
5589 
5590   /* Param "q", a pointer.  */
5591   tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5592 		       get_identifier ("q"),
5593 		       ptr_type_node);
5594   DECL_CONTEXT (q) = test_fndecl;
5595 
5596   program_point point (program_point::origin ());
5597   region_model_manager mgr;
5598 
5599   {
5600     region_model model0 (&mgr);
5601     region_model model1 (&mgr);
5602     region_model merged (&mgr);
5603     /* Verify empty models can be merged.  */
5604     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5605     ASSERT_EQ (model0, merged);
5606   }
5607 
5608   /* Verify that we can merge two contradictory constraints on the
5609      value for a global.  */
5610   /* TODO: verify that the merged model doesn't have a value for
5611      the global.  */
5612   {
5613     region_model model0 (&mgr);
5614     region_model model1 (&mgr);
5615     region_model merged (&mgr);
5616     test_region_model_context ctxt;
5617     model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5618     model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
5619     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5620     ASSERT_NE (model0, merged);
5621     ASSERT_NE (model1, merged);
5622   }
5623 
5624   /* Verify handling of a PARM_DECL.  */
5625   {
5626     test_region_model_context ctxt;
5627     region_model model0 (&mgr);
5628     region_model model1 (&mgr);
5629     ASSERT_EQ (model0.get_stack_depth (), 0);
5630     model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5631     ASSERT_EQ (model0.get_stack_depth (), 1);
5632     model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5633 
5634     placeholder_svalue test_sval (integer_type_node, "test sval");
5635     model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5636     model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5637     ASSERT_EQ (model0, model1);
5638 
5639     /* They should be mergeable, and the result should be the same.  */
5640     region_model merged (&mgr);
5641     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5642     ASSERT_EQ (model0, merged);
5643     /* In particular, "a" should have the placeholder value.  */
5644     ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
5645   }
5646 
5647   /* Verify handling of a global.  */
5648   {
5649     test_region_model_context ctxt;
5650     region_model model0 (&mgr);
5651     region_model model1 (&mgr);
5652 
5653     placeholder_svalue test_sval (integer_type_node, "test sval");
5654     model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5655     model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5656     ASSERT_EQ (model0, model1);
5657 
5658     /* They should be mergeable, and the result should be the same.  */
5659     region_model merged (&mgr);
5660     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5661     ASSERT_EQ (model0, merged);
5662     /* In particular, "x" should have the placeholder value.  */
5663     ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
5664   }
5665 
5666   /* Use global-handling to verify various combinations of values.  */
5667 
5668   /* Two equal constant values.  */
5669   {
5670     region_model merged (&mgr);
5671     const svalue *merged_x_sval;
5672     assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5673 
5674     /* In particular, there should be a constant value for "x".  */
5675     ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5676     ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5677 	       int_42);
5678   }
5679 
5680   /* Two non-equal constant values.  */
5681   {
5682     region_model merged (&mgr);
5683     const svalue *merged_x_sval;
5684     assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5685 
5686     /* In particular, there should be a "widening" value for "x".  */
5687     ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
5688   }
5689 
5690   /* Initial and constant.  */
5691   {
5692     region_model merged (&mgr);
5693     const svalue *merged_x_sval;
5694     assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5695 
5696     /* In particular, there should be an unknown value for "x".  */
5697     ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5698   }
5699 
5700   /* Constant and initial.  */
5701   {
5702     region_model merged (&mgr);
5703     const svalue *merged_x_sval;
5704     assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5705 
5706     /* In particular, there should be an unknown value for "x".  */
5707     ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5708   }
5709 
5710   /* Unknown and constant.  */
5711   // TODO
5712 
5713   /* Pointers: NULL and NULL.  */
5714   // TODO
5715 
5716   /* Pointers: NULL and non-NULL.  */
5717   // TODO
5718 
5719   /* Pointers: non-NULL and non-NULL: ptr to a local.  */
5720   {
5721     region_model model0 (&mgr);
5722     model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5723     model0.set_value (model0.get_lvalue (p, NULL),
5724 		      model0.get_rvalue (addr_of_a, NULL), NULL);
5725 
5726     region_model model1 (model0);
5727     ASSERT_EQ (model0, model1);
5728 
5729     /* They should be mergeable, and the result should be the same.  */
5730     region_model merged (&mgr);
5731     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5732     ASSERT_EQ (model0, merged);
5733   }
5734 
5735   /* Pointers: non-NULL and non-NULL: ptr to a global.  */
5736   {
5737     region_model merged (&mgr);
5738     /* p == &y in both input models.  */
5739     const svalue *merged_p_sval;
5740     assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5741 				&merged_p_sval);
5742 
5743     /* We should get p == &y in the merged model.  */
5744     ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
5745     const region_svalue *merged_p_ptr
5746       = merged_p_sval->dyn_cast_region_svalue ();
5747     const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5748     ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
5749   }
5750 
5751   /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
5752   {
5753     region_model merged (&mgr);
5754     /* x == &y vs x == &z in the input models; these are actually casts
5755        of the ptrs to "int".  */
5756     const svalue *merged_x_sval;
5757     // TODO:
5758     assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5759 				&merged_x_sval);
5760 
5761     /* We should get x == unknown in the merged model.  */
5762     ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5763   }
5764 
5765   /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
5766   {
5767     test_region_model_context ctxt;
5768     region_model model0 (&mgr);
5769     tree size = build_int_cst (size_type_node, 1024);
5770     const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
5771     const region *new_reg
5772       = model0.create_region_for_heap_alloc (size_sval, &ctxt);
5773     const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
5774     model0.set_value (model0.get_lvalue (p, &ctxt),
5775 		      ptr_sval, &ctxt);
5776 
5777     region_model model1 (model0);
5778 
5779     ASSERT_EQ (model0, model1);
5780 
5781     region_model merged (&mgr);
5782     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5783 
5784     /* The merged model ought to be identical.  */
5785     ASSERT_EQ (model0, merged);
5786   }
5787 
5788   /* Two regions sharing the same placeholder svalue should continue sharing
5789      it after self-merger.  */
5790   {
5791     test_region_model_context ctxt;
5792     region_model model0 (&mgr);
5793     placeholder_svalue placeholder_sval (integer_type_node, "test");
5794     model0.set_value (model0.get_lvalue (x, &ctxt),
5795 		      &placeholder_sval, &ctxt);
5796     model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
5797     region_model model1 (model0);
5798 
5799     /* They should be mergeable, and the result should be the same.  */
5800     region_model merged (&mgr);
5801     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5802     ASSERT_EQ (model0, merged);
5803 
5804     /* In particular, we should have x == y.  */
5805     ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5806 	       tristate (tristate::TS_TRUE));
5807   }
5808 
5809   {
5810     region_model model0 (&mgr);
5811     region_model model1 (&mgr);
5812     test_region_model_context ctxt;
5813     model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5814     model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5815     region_model merged (&mgr);
5816     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5817   }
5818 
5819   {
5820     region_model model0 (&mgr);
5821     region_model model1 (&mgr);
5822     test_region_model_context ctxt;
5823     model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5824     model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5825     model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
5826     region_model merged (&mgr);
5827     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5828   }
5829 
5830   // TODO: what can't we merge? need at least one such test
5831 
5832   /* TODO: various things
5833      - heap regions
5834      - value merging:
5835        - every combination, but in particular
5836 	   - pairs of regions
5837    */
5838 
5839   /* Views: merging models where a value was written via a cast_region "view".  */
5840   {
5841     test_region_model_context ctxt;
5842     region_model model0 (&mgr);
5843 
5844     const region *x_reg = model0.get_lvalue (x, &ctxt);
5845     const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
5846     model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5847 
5848     region_model model1 (model0);
5849     ASSERT_EQ (model1, model0);
5850 
5851     /* They should be mergeable, and the result should be the same.  */
5852     region_model merged (&mgr);
5853     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5854   }
5855 
5856   /* Verify that we can merge a model in which a local in an older stack
5857      frame points to a local in a more recent stack frame.  */
5858   {
5859     region_model model0 (&mgr);
5860     model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5861     const region *q_in_first_frame = model0.get_lvalue (q, NULL);
5862 
5863     /* Push a second frame.  */
5864     const region *reg_2nd_frame
5865       = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5866 
5867     /* Have a pointer in the older frame point to a local in the
5868        more recent frame.  */
5869     const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5870     model0.set_value (q_in_first_frame, sval_ptr, NULL);
5871 
5872     /* Verify that it's pointing at the newer frame.  */
5873     const region *reg_pointee = sval_ptr->maybe_get_region ();
5874     ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
5875 
5876     model0.canonicalize ();
5877 
5878     region_model model1 (model0);
5879     ASSERT_EQ (model0, model1);
5880 
5881     /* They should be mergeable, and the result should be the same
5882        (after canonicalization, at least).  */
5883     region_model merged (&mgr);
5884     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5885     merged.canonicalize ();
5886     ASSERT_EQ (model0, merged);
5887   }
5888 
5889   /* Verify that we can merge a model in which a local points to a global.  */
5890   {
5891     region_model model0 (&mgr);
5892     model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5893     model0.set_value (model0.get_lvalue (q, NULL),
5894 		      model0.get_rvalue (addr_of_y, NULL), NULL);
5895 
5896     region_model model1 (model0);
5897     ASSERT_EQ (model0, model1);
5898 
5899     /* They should be mergeable, and the result should be the same
5900        (after canonicalization, at least).  */
5901     region_model merged (&mgr);
5902     ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5903     ASSERT_EQ (model0, merged);
5904   }
5905 }
5906 
5907 /* Verify that constraints are correctly merged when merging region_model
5908    instances.  */
5909 
5910 static void
5911 test_constraint_merging ()
5912 {
5913   tree int_0 = build_int_cst (integer_type_node, 0);
5914   tree int_5 = build_int_cst (integer_type_node, 5);
5915   tree x = build_global_decl ("x", integer_type_node);
5916   tree y = build_global_decl ("y", integer_type_node);
5917   tree z = build_global_decl ("z", integer_type_node);
5918   tree n = build_global_decl ("n", integer_type_node);
5919 
5920   region_model_manager mgr;
5921   test_region_model_context ctxt;
5922 
5923   /* model0: 0 <= (x == y) < n.  */
5924   region_model model0 (&mgr);
5925   model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5926   model0.add_constraint (x, GE_EXPR, int_0, NULL);
5927   model0.add_constraint (x, LT_EXPR, n, NULL);
5928 
5929   /* model1: z != 5 && (0 <= x < n).  */
5930   region_model model1 (&mgr);
5931   model1.add_constraint (z, NE_EXPR, int_5, NULL);
5932   model1.add_constraint (x, GE_EXPR, int_0, NULL);
5933   model1.add_constraint (x, LT_EXPR, n, NULL);
5934 
5935   /* They should be mergeable; the merged constraints should
5936      be: (0 <= x < n).  */
5937   program_point point (program_point::origin ());
5938   region_model merged (&mgr);
5939   ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5940 
5941   ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5942 	     tristate (tristate::TS_TRUE));
5943   ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5944 	     tristate (tristate::TS_TRUE));
5945 
5946   ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5947 	     tristate (tristate::TS_UNKNOWN));
5948   ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5949 	     tristate (tristate::TS_UNKNOWN));
5950 }
5951 
5952 /* Verify that widening_svalue::eval_condition_without_cm works as
5953    expected.  */
5954 
5955 static void
5956 test_widening_constraints ()
5957 {
5958   program_point point (program_point::origin ());
5959   tree int_0 = build_int_cst (integer_type_node, 0);
5960   tree int_m1 = build_int_cst (integer_type_node, -1);
5961   tree int_1 = build_int_cst (integer_type_node, 1);
5962   tree int_256 = build_int_cst (integer_type_node, 256);
5963   region_model_manager mgr;
5964   test_region_model_context ctxt;
5965   const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5966   const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5967   const svalue *w_zero_then_one_sval
5968     = mgr.get_or_create_widening_svalue (integer_type_node, point,
5969 					  int_0_sval, int_1_sval);
5970   const widening_svalue *w_zero_then_one
5971     = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5972   ASSERT_EQ (w_zero_then_one->get_direction (),
5973 	     widening_svalue::DIR_ASCENDING);
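  /* The widened value starts at 0 and ascends, so it is known to be
     >= 0, but its upper bound is unknown.  */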
5974   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5975 	     tristate::TS_FALSE);
5976   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5977 	     tristate::TS_FALSE);
5978   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5979 	     tristate::TS_UNKNOWN);
5980   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5981 	     tristate::TS_UNKNOWN);
5982 
5983   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5984 	     tristate::TS_FALSE);
5985   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5986 	     tristate::TS_UNKNOWN);
5987   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5988 	     tristate::TS_UNKNOWN);
5989   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5990 	     tristate::TS_UNKNOWN);
5991 
5992   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5993 	     tristate::TS_TRUE);
5994   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5995 	     tristate::TS_UNKNOWN);
5996   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5997 	     tristate::TS_UNKNOWN);
5998   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5999 	     tristate::TS_UNKNOWN);
6000 
6001   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
6002 	     tristate::TS_TRUE);
6003   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
6004 	     tristate::TS_TRUE);
6005   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
6006 	     tristate::TS_UNKNOWN);
6007   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
6008 	     tristate::TS_UNKNOWN);
6009 
6010   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
6011 	     tristate::TS_FALSE);
6012   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
6013 	     tristate::TS_UNKNOWN);
6014   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
6015 	     tristate::TS_UNKNOWN);
6016   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
6017 	     tristate::TS_UNKNOWN);
6018 
6019   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
6020 	     tristate::TS_TRUE);
6021   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
6022 	     tristate::TS_UNKNOWN);
6023   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
6024 	     tristate::TS_UNKNOWN);
6025   ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
6026 	     tristate::TS_UNKNOWN);
6027 }
6028 
6029 /* Verify merging constraints for states simulating successive iterations
6030    of a loop.
6031    Simulate:
6032      for (i = 0; i < 256; i++)
6033        [...body...]
6034    i.e. this gimple:
6035      i_15 = 0;
6036      goto <bb 4>;
6037 
6038    <bb 4> :
6039      i_11 = PHI <i_15(2), i_23(3)>
6040      if (i_11 <= 255)
6041        goto <bb 3>;
6042      else
6043        goto [AFTER LOOP]
6044 
6045    <bb 3> :
6046      [LOOP BODY]
6047      i_23 = i_11 + 1;
6048 
6049    and thus these ops (and resultant states):
6050      i_11 = PHI()
6051        {i_11: 0}
6052      add_constraint (i_11 <= 255) [for the true edge]
6053        {i_11: 0}  [constraint was a no-op]
6054      i_23 = i_11 + 1;
6055        {i_23: 1}
6056      i_11 = PHI()
6057        {i_11: WIDENED (at phi, 0, 1)}
6058      add_constraint (i_11 <= 255) [for the true edge]
6059        {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
6060      i_23 = i_11 + 1;
6061        {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
6062      i_11 = PHI(); merge with state at phi above
6063        {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
6064          [changing meaning of "WIDENED" here]
6065      if (i_11 <= 255)
6066         T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
6067         F: {i_11: 256}
6068  */
6069 
6070 static void
6071 test_iteration_1 ()
6072 {
6073   program_point point (program_point::origin ());
6074 
6075   tree int_0 = build_int_cst (integer_type_node, 0);
6076   tree int_1 = build_int_cst (integer_type_node, 1);
6077   tree int_256 = build_int_cst (integer_type_node, 256);
6078   tree int_257 = build_int_cst (integer_type_node, 257);
6079   tree i = build_global_decl ("i", integer_type_node);
6080 
6081   region_model_manager mgr;
6082   test_region_model_context ctxt;
6083 
6084   /* model0: i: 0.  */
6085   region_model model0 (&mgr);
6086   model0.set_value (i, int_0, &ctxt);
6087 
6088   /* model1: i: 1.  */
6089   region_model model1 (&mgr);
6090   model1.set_value (i, int_1, &ctxt);
6091 
6092   /* Should merge "i" to a widened value.  */
6093   region_model model2 (&mgr);
6094   ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
6095   const svalue *merged_i = model2.get_rvalue (i, &ctxt);
6096   ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
6097   const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
6098   ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
6099 
6100   /* Add constraint: i < 256  */
6101   model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
6102   ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
6103 	     tristate (tristate::TS_TRUE));
6104   ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
6105 	     tristate (tristate::TS_TRUE));
6106 
6107   /* Try merging with the initial state.  */
6108   region_model model3 (&mgr);
6109   ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
6110   /* Merging the merged value with the initial value should be idempotent,
6111      so that the analysis converges.  */
6112   ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
6113   /* Merger of 0 and a widening value with constraint < CST
6114      should retain the constraint, even though it was implicit
6115      for the 0 case.  */
6116   ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
6117 	     tristate (tristate::TS_TRUE));
6118   /* ...and we should have equality: the analysis should have converged.  */
6119   ASSERT_EQ (model3, model2);
6120 
6121   /* "i_23 = i_11 + 1;"  */
6122   region_model model4 (model3);
6123   ASSERT_EQ (model4, model2);
6124   model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
6125   const svalue *plus_one = model4.get_rvalue (i, &ctxt);
6126   ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
6127 
6128   /* Try merging with the "i: 1" state.  */
6129   region_model model5 (&mgr);
6130   ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
6131   ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
6132   ASSERT_EQ (model5, model4);
6133 
6134   /* "i_11 = PHI();" merge with state at phi above.
6135      For i, we should have a merger of WIDENING with WIDENING + 1,
6136      and this should be WIDENING again.  */
6137   region_model model6 (&mgr);
6138   ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
6139   const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
6140   ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
6141 
6142   ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
6143 }
6144 
6145 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
6146    all cast pointers to that region are also known to be non-NULL.  */
6147 
6148 static void
6149 test_malloc_constraints ()
6150 {
6151   region_model_manager mgr;
6152   region_model model (&mgr);
6153   tree p = build_global_decl ("p", ptr_type_node);
6154   tree char_star = build_pointer_type (char_type_node);
6155   tree q = build_global_decl ("q", char_star);
6156   tree null_ptr = build_int_cst (ptr_type_node, 0);
6157 
6158   const svalue *size_in_bytes
6159     = mgr.get_or_create_unknown_svalue (size_type_node);
6160   const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
6161   const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
6162   model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6163   model.set_value (q, p, NULL);
6164 
6165   ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
6166   ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
6167   ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
6168   ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
6169 
6170   model.add_constraint (p, NE_EXPR, null_ptr, NULL);
6171 
6172   ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
6173   ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
6174   ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
6175   ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
6176 }

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

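/* Test getting and setting the values of elements of an array,
   using both constant and symbolic indices, and verify that
   overlapping writes clobber earlier bindings.  */
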
static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
   */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that the model can simulate a heap allocation ("malloc"),
   and that the capacity of the allocated region is tracked.  */
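
/* A sketch of the user-level code being modeled here (for
   illustration only; "n" is a symbolic value):

     int *p = malloc (n * 4);
     // the analyzer should record the capacity of the new heap
     // region as the size argument, "n * 4"
*/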

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that the model can simulate an alloca within a function frame,
   and that pointers into the alloca-ed region are poisoned when the
   frame is popped.  */
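
/* A sketch of the user-level code being modeled here (for
   illustration only):

     int *p;   // global
     void test_fn (void)
     {
       p = alloca (n * 4);
     }
     // once test_fn's frame is popped, p points into a dead frame,
     // so the analyzer replaces its value with a "poisoned" svalue
*/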

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */
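
/* For instance (illustrative): INIT_VAL(*INIT_VAL(p)), the initial
   value of the region pointed to by the initial value of "p",
   involves INIT_VAL(p); but INIT_VAL(p) does not involve
   INIT_VAL(*INIT_VAL(p)), nor anything about "q".  */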

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */