/* Consolidation of svalues and regions.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager ()
: m_next_region_id (0),
  m_root_region (alloc_region_id ()),
  m_stack_region (alloc_region_id (), &m_root_region),
  m_heap_region (alloc_region_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_check_complexity (true),
  m_max_complexity (0, 0),
  m_code_region (alloc_region_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_region_id (), &m_root_region),
  m_globals_map (),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ())
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (!m_check_complexity)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)			\
  do {								\
    svalue *sval_ = (SVAL);					\
    tree type_ = sval_->get_type ();				\
    if (reject_if_too_complex (sval_))				\
      return get_or_create_unknown_svalue (type_);		\
  } while (0)
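
/* For example, the typical usage pattern in the get_or_create_* member
   functions below is (a sketch; "foo_svalue", "m_foo_values_map" and
   "key" are stand-ins for the concrete classes and members used later
   in this file):

     foo_svalue *foo_sval = new foo_svalue (type, arg);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (foo_sval);
     m_foo_values_map.put (key, foo_sval);
     return foo_sval;

   so that an over-complex svalue is discarded and replaced by an
   unknown_svalue of the same type before ever being consolidated.  */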

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval = new constant_svalue (cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}
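
/* For example, consolidation means that two lookups of the same tree
   yield the same instance, so svalues can be compared by pointer
   (a sketch; "mgr" is some region_model_manager *):

     const svalue *a = mgr->get_or_create_constant_svalue (cst_expr);
     const svalue *b = mgr->get_or_create_constant_svalue (cst_expr);
     gcc_assert (a == b);

   The same holds for the other get_or_create_* functions below.  */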

/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for VAL of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
{
  gcc_assert (type);
  tree tree_cst = build_int_cst (type, val);
  return get_or_create_constant_svalue (tree_cst);
}

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg)
{
  if (!reg->can_have_initial_svalue_p ())
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval = new region_svalue (ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      return get_or_create_constant_svalue (result);

  return NULL;
}
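
/* Examples of folds performed by maybe_fold_unaryop above (a sketch,
   assuming a 32-bit "int" and an 8-bit "char"):
   - "(int)(int)x" -> "(int)x" (redundant cast);
   - "(char)(int)(char)y" -> "(char)y" (the outer type is no wider than
     the innermost one, so the intermediate cast can be dropped);
   - "!(a < b)" -> "a >= b" (comparison inversion, for types where NaNs
     need not be honored);
   - "-(3)" -> "-3" (constant folding via fold_unary).  */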

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (TREE_CODE (src_type) == REAL_TYPE)
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);
  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
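
/* A worked example (a sketch, assuming a little-endian target with the
   usual bitfield layout): given

     struct s { unsigned char a : 2; unsigned char b : 3; };

   optimize_bit_field_compare can turn "x.b == 5" into something like
   "((unsigned char)x & 0x1c) == 0x14", with "b" occupying bits 2-4.
   If the compound_svalue for "x" has a binding for bits 2-4, the
   function above recovers that value, shifts it left by 2 and
   re-applies the 0x1c mask, so that the comparison against 0x14 can
   proceed on the bound value.  */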

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if (FLOAT_TYPE_P (type)
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		= maybe_undo_optimize_bit_field_compare (type,
							 compound_sval,
							 cst1, arg1))
	      return sval;
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "((X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ()
	  && type == binop->get_type ()
	  && type == binop->get_arg0 ()->get_type ()
	  && type == binop->get_arg1 ()->get_type ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "((PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* etc.  */

  return NULL;
}
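
/* For example, the associativity fold above rewrites "(x + 1) + 2" as
   "x + (1 + 2)"; the recursive call to get_or_create_binop then folds
   the inner pair of constants, yielding "x + 3".  */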

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     the operation via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
      = parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst))
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      if (const element_region *element_reg
	    = subregion->dyn_cast_element_region ())
	{
	  const svalue *idx_sval = element_reg->get_index ();
	  if (tree cst_idx = idx_sval->maybe_get_constant ())
	    if (const svalue *char_sval
		= maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	}

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r), FIELD) -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r), [ELEMENT]) -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn this into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}
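
/* For example, a zero-fill such as "memset (&i, 0, sizeof (i))" on an
   "int i" can be modelled as a repeat of 4 zero bytes; the zero-fill
   fold above reduces that to "(int)0" rather than retaining a
   repeated_svalue node (a sketch, assuming a 4-byte "int").  */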

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within FIELD_BYTES?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL)), VAL)
     to:
       CAST (TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
      = inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if the range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
		      = inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if the range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		  = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}
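
/* For example (a sketch, assuming a 4-byte "int"): extracting bits
   40-47 of INIT_VAL (arr), where "arr" is an array of "int", is byte 5,
   which lies entirely within element [1] (bytes 4-7), so the ARRAY_TYPE
   case above folds it to extracting bits 8-15 of INIT_VAL (arr[1]).  */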

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_widening_svalue (tree type,
						     const program_point &point,
						     const svalue *base_sval,
						     const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (type, point, base_sval, iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    return *slot;
  conjured_svalue *conjured_sval
    = new conjured_svalue (type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, an INTEGER_CST,
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}
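
/* For example, given a STRING_CST for "foo" and a byte offset of 1,
   the function above returns the constant svalue for 'o'.  */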

/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_region_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_region_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (TREE_CODE (expr) == VAR_DECL);

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_region_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    {
      tree ptr_to_field_type = build_pointer_type (TREE_TYPE (field));
      const svalue *unknown_ptr_to_field
	= get_or_create_unknown_svalue (ptr_to_field_type);
      return get_symbolic_region (unknown_ptr_to_field);
    }

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_region_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_region_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* If BYTE_OFFSET is zero, return PARENT, cast to TYPE.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to   OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_region_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
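
/* For example, by the fold above, an offset of 8 bytes within a region
   that is itself at an offset of 4 bytes within REG becomes a single
   offset_region at 12 bytes within REG: the two offsets are summed via
   get_or_create_binop, which folds the constants.  */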

/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_region_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
			 fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_region_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_region_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

/* If we see a tree code we don't know how to handle, rather than
   ICE or generate bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a new region describing a heap-allocated block of memory.  */

const region *
region_model_manager::create_region_for_heap_alloc ()
{
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log ("  next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */