xref: /netbsd/external/gpl3/gcc/dist/gcc/analyzer/store.h (revision f0fbc68b)
1 /* Classes for modeling the state of memory.
2    Copyright (C) 2020-2022 Free Software Foundation, Inc.
3    Contributed by David Malcolm <dmalcolm@redhat.com>.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15 General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #ifndef GCC_ANALYZER_STORE_H
22 #define GCC_ANALYZER_STORE_H
23 
24 /* Implementation of the region-based ternary model described in:
25      "A Memory Model for Static Analysis of C Programs"
26       (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
27      http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf  */
28 
29 /* The store models memory as a collection of "clusters", where regions
30    are partitioned into clusters via their base region.
31 
32    For example, given:
33      int a, b, c;
34      struct coord { double x; double y; } verts[3];
35    then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
36    Each of a, b, c, and verts will have their own clusters, so that we
37    know that writes to e.g. "verts[1].x" don't affect e.g. "a".
38 
39    Within each cluster we store a map of bindings to values, where the
40    binding keys can be either concrete or symbolic.
41 
42    Concrete bindings affect a specific range of bits relative to the start
43    of the base region of the cluster, whereas symbolic bindings affect
44    a specific subregion within the cluster.
45 
46    Consider (from the symbolic-1.c testcase):
47 
48      char arr[1024];
49      arr[2] = a;  (1)
50      arr[3] = b;  (2)
51        After (1) and (2), the cluster for "arr" has concrete bindings
52        for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
53        and "INIT_VAL(b)" respectively:
54        cluster: {bits 16-23: "INIT_VAL(a)",
55                  bits 24-31: "INIT_VAL(b)";
56                  flags: {}}
57        Attempting to query unbound subregions e.g. arr[4] will
58        return "UNINITIALIZED".
59        "a" and "b" are each in their own clusters, with no explicit
60        bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b).
61 
62      arr[3] = c;  (3)
63        After (3), the concrete binding for bits 24-31 is replaced with the
64        svalue "INIT_VAL(c)":
65        cluster: {bits 16-23: "INIT_VAL(a)",  (from before)
66                  bits 24-31: "INIT_VAL(c)";  (updated)
67                  flags: {}}
68 
69      arr[i] = d;  (4)
70        After (4), we lose the concrete bindings and replace them with a
71        symbolic binding for "arr[i]", with svalue "INIT_VAL(d)".  We also
72        mark the cluster as having been "symbolically touched": future
73        attempts to query the values of subregions other than "arr[i]",
74        such as "arr[3]" are "UNKNOWN", since we don't know if the write
75        to arr[i] affected them.
76        cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
77                  flags: {TOUCHED}}
78 
79      arr[j] = e;  (5)
80        After (5), we lose the symbolic binding for "arr[i]" since we could
81        have overwritten it, and add a symbolic binding for "arr[j]".
82        cluster: {symbolic_key(arr[j]): "INIT_VAL(e)"; (different symbolic
83                  flags: {TOUCHED}}                     binding)
84 
85      arr[3] = f;  (6)
86        After (6), we lose the symbolic binding for "arr[j]" since we could
87        have overwritten it, and gain a concrete binding for bits 24-31
88        again, this time with svalue "INIT_VAL(f)":
89        cluster: {bits 24-31: "INIT_VAL(f)";
90                  flags: {TOUCHED}}
91        The cluster is still flagged as touched, so that we know that
92        accesses to other elements are "UNKNOWN" rather than
93        "UNINITIALIZED".
94 
95    Handling symbolic regions requires us to handle aliasing.
96 
97    In the first example above, each of a, b, c and verts are non-symbolic
98    base regions and so their clusters are "concrete clusters", whereas given:
99        struct coord *p, *q;
100    then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
101    have "symbolic clusters".
102 
103    In the above, "verts[i].x" will have a symbolic *binding* within a
104    concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.
105 
106    Writes to concrete clusters can't affect other concrete clusters,
107    but can affect symbolic clusters; e.g. after:
108        verts[0].x = 42;
109    we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
110    can't be affected.  Any symbolic clusters for *p and for *q can be
111    affected, since *p and *q could alias verts.
112 
113    Writes to a symbolic cluster can affect other clusters, both
114    concrete and symbolic; e.g. after:
115        p->x = 17;
116    we bind 17 within the cluster for "*p".  The concrete clusters for a, b,
117    c, and verts could be affected, depending on whether *p aliases them.
118    Similarly, the symbolic cluster to *q could be affected.  */
119 
120 namespace ana {
121 
122 /* A class for keeping track of aspects of a program_state that we don't
123    know about, to avoid false positives about leaks.
124 
125    Consider:
126 
127       p->field = malloc (1024);
128       q->field = NULL;
129 
130    where we don't know whether or not p and q point to the same memory,
131    and:
132 
133       p->field = malloc (1024);
134       unknown_fn (p);
135 
136    In both cases, the svalue for the address of the allocated buffer
137    goes from being bound to p->field to not having anything explicitly bound
138    to it.
139 
140    Given that we conservatively discard bindings due to possible aliasing or
141    calls to unknown function, the store loses references to svalues,
142    but these svalues could still be live.  We don't want to warn about
143    them leaking - they're effectively in a "maybe live" state.
144 
145    This "maybe live" information is somewhat transient.
146 
147    We don't want to store this "maybe live" information in the program_state,
148    region_model, or store, since we don't want to bloat these objects (and
149    potentially bloat the exploded_graph with more nodes).
150    However, we can't store it in the region_model_context, as these context
151    objects sometimes don't last long enough to be around when comparing the
152    old vs the new state.
153 
154    This class is a way to track a set of such svalues, so that we can
155    temporarily capture that they are in a "maybe live" state whilst
156    comparing old and new states.  */
157 
class uncertainty_t
{
public:
  typedef hash_set<const svalue *>::iterator iterator;

  /* Record that SVAL may still be bound somewhere we can no longer see
     (e.g. a binding conservatively discarded due to possible aliasing).  */
  void on_maybe_bound_sval (const svalue *sval)
  {
    m_maybe_bound_svals.add (sval);
  }
  /* Record that SVAL has sm-state that a call to an unknown function
     could have mutated.  */
  void on_mutable_sval_at_unknown_call (const svalue *sval)
  {
    m_mutable_at_unknown_call_svals.add (sval);
  }

  /* Return true if SVAL was recorded by either of the above, i.e. we
     can't trust any sm-state we have for it.  */
  bool unknown_sm_state_p (const svalue *sval)
  {
    return (m_maybe_bound_svals.contains (sval)
	    || m_mutable_at_unknown_call_svals.contains (sval));
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const;
  void dump (bool simple) const;

  /* Iteration over the "maybe bound" set only (not the
     mutable-at-unknown-call set).  */
  iterator begin_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.begin ();
  }
  iterator end_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.end ();
  }

private:

  /* svalues that might or might not still be bound.  */
  hash_set<const svalue *> m_maybe_bound_svals;

  /* svalues that have mutable sm-state at unknown calls.  */
  hash_set<const svalue *> m_mutable_at_unknown_call_svals;
};
198 
199 class byte_range;
200 class concrete_binding;
201 class symbolic_binding;
202 
203 /* Abstract base class for describing ranges of bits within a binding_map
204    that can have svalues bound to them.  */
205 
class binding_key
{
public:
  virtual ~binding_key () {}
  /* Return true if this key identifies a concrete range of bits
     (see class concrete_binding), as opposed to a symbolic region
     (see class symbolic_binding).  */
  virtual bool concrete_p () const = 0;
  /* A key is symbolic iff it is not concrete.  */
  bool symbolic_p () const { return !concrete_p (); }

  /* Get the (consolidated) key for accessing region R, using MGR to
     consolidate key instances.  */
  static const binding_key *make (store_manager *mgr, const region *r);

  virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
  void dump (bool simple) const;
  label_text get_desc (bool simple=true) const;

  /* qsort-style comparators; cmp_ptrs takes pointers to
     binding_key pointers.  */
  static int cmp_ptrs (const void *, const void *);
  static int cmp (const binding_key *, const binding_key *);

  /* Downcasts, returning NULL when this key is not of the
     requested subclass.  */
  virtual const concrete_binding *dyn_cast_concrete_binding () const
  { return NULL; }
  virtual const symbolic_binding *dyn_cast_symbolic_binding () const
  { return NULL; }
};
227 
228 /* A concrete range of bits.  */
229 
230 struct bit_range
231 {
bit_rangebit_range232   bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
233   : m_start_bit_offset (start_bit_offset),
234     m_size_in_bits (size_in_bits)
235   {}
236 
237   void dump_to_pp (pretty_printer *pp) const;
238   void dump () const;
239 
get_start_bit_offsetbit_range240   bit_offset_t get_start_bit_offset () const
241   {
242     return m_start_bit_offset;
243   }
get_next_bit_offsetbit_range244   bit_offset_t get_next_bit_offset () const
245   {
246     return m_start_bit_offset + m_size_in_bits;
247   }
get_last_bit_offsetbit_range248   bit_offset_t get_last_bit_offset () const
249   {
250     return get_next_bit_offset () - 1;
251   }
252 
contains_pbit_range253   bool contains_p (bit_offset_t offset) const
254   {
255     return (offset >= get_start_bit_offset ()
256 	    && offset < get_next_bit_offset ());
257   }
258 
259   bool contains_p (const bit_range &other, bit_range *out) const;
260 
261   bool operator== (const bit_range &other) const
262   {
263     return (m_start_bit_offset == other.m_start_bit_offset
264 	    && m_size_in_bits == other.m_size_in_bits);
265   }
266 
intersects_pbit_range267   bool intersects_p (const bit_range &other) const
268   {
269     return (get_start_bit_offset () < other.get_next_bit_offset ()
270 	    && other.get_start_bit_offset () < get_next_bit_offset ());
271   }
272   bool intersects_p (const bit_range &other,
273 		     bit_range *out_this,
274 		     bit_range *out_other) const;
275 
276   static int cmp (const bit_range &br1, const bit_range &br2);
277 
278   bit_range operator- (bit_offset_t offset) const;
279 
280   static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);
281 
282   bool as_byte_range (byte_range *out) const;
283 
284   bit_offset_t m_start_bit_offset;
285   bit_size_t m_size_in_bits;
286 };
287 
288 /* A concrete range of bytes.  */
289 
290 struct byte_range
291 {
byte_rangebyte_range292   byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
293   : m_start_byte_offset (start_byte_offset),
294     m_size_in_bytes (size_in_bytes)
295   {}
296 
297   void dump_to_pp (pretty_printer *pp) const;
298   void dump () const;
299 
contains_pbyte_range300   bool contains_p (byte_offset_t offset) const
301   {
302     return (offset >= get_start_byte_offset ()
303 	    && offset < get_next_byte_offset ());
304   }
305   bool contains_p (const byte_range &other, byte_range *out) const;
306 
307   bool operator== (const byte_range &other) const
308   {
309     return (m_start_byte_offset == other.m_start_byte_offset
310 	    && m_size_in_bytes == other.m_size_in_bytes);
311   }
312 
get_start_byte_offsetbyte_range313   byte_offset_t get_start_byte_offset () const
314   {
315     return m_start_byte_offset;
316   }
get_next_byte_offsetbyte_range317   byte_offset_t get_next_byte_offset () const
318   {
319     return m_start_byte_offset + m_size_in_bytes;
320   }
get_last_byte_offsetbyte_range321   byte_offset_t get_last_byte_offset () const
322   {
323     return m_start_byte_offset + m_size_in_bytes - 1;
324   }
325 
as_bit_rangebyte_range326   bit_range as_bit_range () const
327   {
328     return bit_range (m_start_byte_offset * BITS_PER_UNIT,
329 		      m_size_in_bytes * BITS_PER_UNIT);
330   }
331 
332   static int cmp (const byte_range &br1, const byte_range &br2);
333 
334   byte_offset_t m_start_byte_offset;
335   byte_size_t m_size_in_bytes;
336 };
337 
338 /* Concrete subclass of binding_key, for describing a concrete range of
339    bits within the binding_map (e.g. "bits 8-15").  */
340 
class concrete_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef concrete_binding key_t;

  concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_bit_range (start_bit_offset, size_in_bits)
  {}
  bool concrete_p () const FINAL OVERRIDE { return true; }

  /* Hash on both the start offset and size of the bit range.  */
  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_wide_int (m_bit_range.m_start_bit_offset);
    hstate.add_wide_int (m_bit_range.m_size_in_bits);
    return hstate.end ();
  }
  bool operator== (const concrete_binding &other) const
  {
    return m_bit_range == other.m_bit_range;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const FINAL OVERRIDE;

  const concrete_binding *dyn_cast_concrete_binding () const FINAL OVERRIDE
  { return this; }

  const bit_range &get_bit_range () const { return m_bit_range; }

  bit_offset_t get_start_bit_offset () const
  {
    return m_bit_range.m_start_bit_offset;
  }
  bit_size_t get_size_in_bits () const
  {
    return m_bit_range.m_size_in_bits;
  }
  /* Return the next bit offset after the end of this binding.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_bit_range.get_next_bit_offset ();
  }

  /* Return true if this binding's bit range intersects OTHER's.  */
  bool overlaps_p (const concrete_binding &other) const;

  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for use as a hash_map/hash_set key: start offsets of -1
     and -2 serve as the "deleted" and "empty" slot markers
     (presumably real bindings never start at a negative bit offset;
     see the empty_zero_p hash-traits specialization below).  */
  void mark_deleted () { m_bit_range.m_start_bit_offset = -1; }
  void mark_empty () { m_bit_range.m_start_bit_offset = -2; }
  bool is_deleted () const { return m_bit_range.m_start_bit_offset == -1; }
  bool is_empty () const { return m_bit_range.m_start_bit_offset == -2; }

private:
  bit_range m_bit_range;
};
397 
398 } // namespace ana
399 
/* Hash traits for concrete_binding, delegating hashing/equality to its
   member functions.  empty_zero_p is false because the all-zero-bits
   value (start offset 0) is a legitimate binding; "empty" is instead
   signalled by a start offset of -2 (see concrete_binding::mark_empty).  */

template <> struct default_hash_traits<ana::concrete_binding>
: public member_function_hash_traits<ana::concrete_binding>
{
  static const bool empty_zero_p = false;
};
405 
406 namespace ana {
407 
408 /* Concrete subclass of binding_key, for describing a symbolic set of
409    bits within the binding_map in terms of a region (e.g. "arr[i]").  */
410 
class symbolic_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef symbolic_binding key_t;

  symbolic_binding (const region *region) : m_region (region) {}
  bool concrete_p () const FINAL OVERRIDE { return false; }

  /* Hash on the pointer identity of the region (regions are
     consolidated by the region_model_manager, so pointer equality
     implies equality).  */
  hashval_t hash () const
  {
    return (intptr_t)m_region;
  }
  bool operator== (const symbolic_binding &other) const
  {
    return m_region == other.m_region;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const FINAL OVERRIDE;

  const symbolic_binding *dyn_cast_symbolic_binding () const FINAL OVERRIDE
  { return this; }

  const region *get_region () const { return m_region; }

  static int cmp_ptr_ptr (const void *, const void *);

  /* Support for use as a hash_map/hash_set key: the (invalid) pointer
     value 1 marks deleted slots, and NULL marks empty slots (hence
     empty_zero_p is true in the traits specialization below).  */
  void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
  void mark_empty () { m_region = NULL; }
  bool is_deleted () const
  { return m_region == reinterpret_cast<const region *> (1); }
  bool is_empty () const { return m_region == NULL; }

private:
  const region *m_region;
};
447 
448 } // namespace ana
449 
/* Hash traits for symbolic_binding, delegating hashing/equality to its
   member functions.  empty_zero_p is true because an all-zero-bits
   instance (m_region == NULL) is exactly the "empty" sentinel
   (see symbolic_binding::mark_empty).  */

template <> struct default_hash_traits<ana::symbolic_binding>
: public member_function_hash_traits<ana::symbolic_binding>
{
  static const bool empty_zero_p = true;
};
455 
456 namespace ana {
457 
458 /* A mapping from binding_keys to svalues, for use by binding_cluster
459    and compound_svalue.  */
460 
class binding_map
{
public:
  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_map () : m_map () {}
  binding_map (const binding_map &other);
  binding_map& operator=(const binding_map &other);

  bool operator== (const binding_map &other) const;
  bool operator!= (const binding_map &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Return the svalue bound to KEY, or NULL if KEY has no binding.
     (const_cast is needed because hash_map::get is non-const.)  */
  const svalue *get (const binding_key *key) const
  {
    const svalue **slot = const_cast<map_t &> (m_map).get (key);
    if (slot)
      return *slot;
    else
      return NULL;
  }
  /* Bind K to V, which must be non-NULL; return true if K was
     already bound (hash_map::put semantics).  */
  bool put (const binding_key *k, const svalue *v)
  {
    gcc_assert (v);
    return m_map.put (k, v);
  }

  void remove (const binding_key *k) { m_map.remove (k); }
  void empty () { m_map.empty (); }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }
  size_t elements () const { return m_map.elements (); }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  json::object *to_json () const;

  /* Populate this map from CTOR relative to PARENT_REG
     (CTOR is presumably a CONSTRUCTOR tree node; confirm against
     store.cc), returning false on problems.  */
  bool apply_ctor_to_region (const region *parent_reg, tree ctor,
			     region_model_manager *mgr);

  static int cmp (const binding_map &map1, const binding_map &map2);

  void remove_overlapping_bindings (store_manager *mgr,
				    const binding_key *drop_key,
				    uncertainty_t *uncertainty,
				    bool always_overlap);

private:
  void get_overlapping_bindings (const binding_key *key,
				 auto_vec<const binding_key *> *out);
  bool apply_ctor_val_to_range (const region *parent_reg,
				region_model_manager *mgr,
				tree min_index, tree max_index,
				tree val);
  bool apply_ctor_pair_to_child_region (const region *parent_reg,
					region_model_manager *mgr,
					tree index, tree val);

  map_t m_map;
};
528 
529 /* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
530    and store::for_each_binding.
531 
532    Should implement:
533      void on_binding (const binding_key *key, const svalue *&sval);
534 */
535 
536 /* All of the bindings within a store for regions that share the same
537    base region.  */
538 
class binding_cluster
{
public:
  friend class store;

  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_cluster (const region *base_region)
  : m_base_region (base_region), m_map (),
    m_escaped (false), m_touched (false) {}
  binding_cluster (const binding_cluster &other);
  binding_cluster& operator=(const binding_cluster &other);

  bool operator== (const binding_cluster &other) const;
  bool operator!= (const binding_cluster &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Return true if this cluster's base region is symbolic
     (see the file-level comment on symbolic vs concrete clusters).  */
  bool symbolic_p () const;

  const region *get_base_region () const { return m_base_region; }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  void validate () const;

  json::object *to_json () const;

  /* Bind an svalue to a region within this cluster.  */
  void bind (store_manager *mgr, const region *, const svalue *);

  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr,
			       const region *reg_to_bind,
			       const region *reg_for_overlap,
			       uncertainty_t *uncertainty);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Lookups of increasing generality; each returns NULL on failure
     (exact semantics of the variants are in store.cc).  */
  const svalue *get_binding (store_manager *mgr, const region *reg) const;
  const svalue *get_binding_recursive (store_manager *mgr,
					const region *reg) const;
  const svalue *get_any_binding (store_manager *mgr,
				  const region *reg) const;
  const svalue *maybe_get_compound_binding (store_manager *mgr,
					     const region *reg) const;

  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);

  /* Call CB on each bound svalue, passing USER_DATA.  */
  template <typename T>
  void for_each_value (void (*cb) (const svalue *sval, T user_data),
		       T user_data) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      cb ((*iter).second, user_data);
  }

  static bool can_merge_p (const binding_cluster *cluster_a,
			   const binding_cluster *cluster_b,
			   binding_cluster *out_cluster,
			   store *out_store,
			   store_manager *mgr,
			   model_merger *merger);
  void make_unknown_relative_to (const binding_cluster *other_cluster,
				 store *out_store,
				 store_manager *mgr);

  void mark_as_escaped ();
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  void on_asm (const gasm *stmt, store_manager *mgr,
	       const conjured_purge &p);

  bool escaped_p () const { return m_escaped; }
  bool touched_p () const { return m_touched; }

  bool redundant_p () const;
  bool empty_p () const { return m_map.elements () == 0; }

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const region *base_reg,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  const svalue *maybe_get_simple_value (store_manager *mgr) const;

  /* Call V.on_binding on each (key, svalue) pair in this cluster
     (see the BindingVisitor concept comment above).  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      {
	const binding_key *key = (*iter).first;
	const svalue *&sval = (*iter).second;
	v.on_binding (key, sval);
      }
  }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }

  const binding_map &get_map () const { return m_map; }

private:
  const svalue *get_any_value (const binding_key *key) const;
  void bind_compound_sval (store_manager *mgr,
			   const region *reg,
			   const compound_svalue *compound_sval);
  void bind_key (const binding_key *key, const svalue *sval);

  /* The base region shared by all regions bound in this cluster.  */
  const region *m_base_region;

  /* The bindings themselves: binding_key -> svalue.  */
  binding_map m_map;

  /* Has a pointer to this cluster "escaped" into a part of the program
     we don't know about (via a call to a function with an unknown body,
     or by being passed in as a pointer param of a "top-level" function call).
     Such regions could be overwritten when other such functions are called,
     even if the region is no longer reachable by pointers that we are
     tracking. */
  bool m_escaped;

  /* Has this cluster been written to via a symbolic binding?
     If so, then we don't know anything about unbound subregions,
     so we can't use initial_svalue, treat them as uninitialized, or
     inherit values from a parent region.  */
  bool m_touched;
};
675 
676 /* The mapping from regions to svalues.
677    This is actually expressed by subdividing into clusters, to better
678    handle aliasing.  */
679 
class store
{
public:
  typedef hash_map <const region *, binding_cluster *> cluster_map_t;

  store ();
  store (const store &other);
  ~store ();

  store &operator= (const store &other);

  bool operator== (const store &other) const;
  bool operator!= (const store &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
		   store_manager *mgr) const;
  void dump (bool simple) const;
  void summarize_to_pp (pretty_printer *pp, bool simple) const;

  void validate () const;

  json::object *to_json () const;

  const svalue *get_any_binding (store_manager *mgr, const region *reg) const;

  bool called_unknown_fn_p () const { return m_called_unknown_fn; }

  /* Bind RHS_SVAL to LHS_REG; UNCERTAINTY records svalues whose
     bindings may be lost in the process (see class uncertainty_t).  */
  void set_value (store_manager *mgr, const region *lhs_reg,
		  const svalue *rhs_sval,
		  uncertainty_t *uncertainty);
  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  /* Cluster lookup by base region; the get_cluster variants can
     return NULL, whereas get_or_create_cluster presumably never
     does (confirm in store.cc).  */
  const binding_cluster *get_cluster (const region *base_reg) const;
  binding_cluster *get_cluster (const region *base_reg);
  binding_cluster *get_or_create_cluster (const region *base_reg);
  void purge_cluster (const region *base_reg);

  /* Call CB on the base region of each cluster, passing USER_DATA.  */
  template <typename T>
  void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
			 T user_data) const
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      cb ((*iter).first, user_data);
  }

  static bool can_merge_p (const store *store_a, const store *store_b,
			   store *out_store, store_manager *mgr,
			   model_merger *merger);

  void mark_as_escaped (const region *base_reg);
  void on_unknown_fncall (const gcall *call, store_manager *mgr,
			  const conjured_purge &p);
  bool escaped_p (const region *reg) const;

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
  cluster_map_t::iterator end () const { return m_cluster_map.end (); }

  /* Tristate evaluation of whether BASE_REG_A and BASE_REG_B
     can alias each other.  */
  tristate eval_alias (const region *base_reg_a,
		       const region *base_reg_b) const;

  /* Call V.on_binding on every binding of every cluster.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v)
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      (*iter).second->for_each_binding (v);
  }

  void canonicalize (store_manager *mgr);
  void loop_replay_fixup (const store *other_store,
			  region_model_manager *mgr);

private:
  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);
  tristate eval_alias_1 (const region *base_reg_a,
			 const region *base_reg_b) const;

  /* Map from base region to its cluster of bindings.  The clusters
     are heap-allocated; presumably owned by this store (note the
     non-trivial destructor and copy operations above).  */
  cluster_map_t m_cluster_map;

  /* If this is true, then unknown code has been called, and so
     any global variable that isn't currently modelled by the store
     has unknown state, rather than being in an "initial state".
     This is to avoid having to mark (and thus explicitly track)
     every global when an unknown function is called; instead, they
     can be tracked implicitly.  */
  bool m_called_unknown_fn;
};
786 
787 /* A class responsible for owning and consolidating binding keys
788    (both concrete and symbolic).
789    Key instances are immutable as far as clients are concerned, so they
790    are provided as "const" ptrs.  */
791 
class store_manager
{
public:
  store_manager (region_model_manager *mgr) : m_mgr (mgr) {}

  logger *get_logger () const;

  /* binding consolidation.  */
  /* NOTE(review): SIZE_IN_BITS is declared as bit_offset_t rather than
     bit_size_t as used elsewhere — presumably compatible typedefs;
     confirm against the definition in store.cc.  */
  const concrete_binding *
  get_concrete_binding (bit_offset_t start_bit_offset,
			bit_offset_t size_in_bits);
  const concrete_binding *
  get_concrete_binding (const bit_range &bits)
  {
    return get_concrete_binding (bits.get_start_bit_offset (),
				 bits.m_size_in_bits);
  }
  const symbolic_binding *
  get_symbolic_binding (const region *region);

  /* The region_model_manager whose svalues/regions the keys refer to.  */
  region_model_manager *get_svalue_manager () const
  {
    return m_mgr;
  }

  void log_stats (logger *logger, bool show_objs) const;

private:
  region_model_manager *m_mgr;
  /* Consolidation tables, so that equal keys are shared as
     single "const" instances.  */
  consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
  consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
};
824 
825 } // namespace ana
826 
827 #endif /* GCC_ANALYZER_STORE_H */
828