1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2021 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
37 
/* State shared across the object-size propagation passes for one
   query.  */
struct object_size_info
{
  int object_size_type;		/* OST_* flags of the current query.  */
  unsigned char pass;		/* 0: initial collection, 1: plus-in-loop
				   detection, 2: reexamination.  */
  bool changed;			/* Set when a size improved this pass.  */
  bitmap visited, reexamine;	/* SSA versions seen / in dependency
				   cycles needing reexamination.  */
  unsigned int *depths;		/* Per-SSA-version DFS depth (pass 1).  */
  unsigned int *stack, *tos;	/* DFS work stack and its top (pass 1).  */
};
47 
/* Bits of the second argument to __builtin_object_size; also used to
   index object_sizes[] and computed[] below.  */
enum
{
  OST_SUBOBJECT = 1,	/* Size of the closest surrounding subobject.  */
  OST_MINIMUM = 2,	/* Lower bound instead of upper bound.  */
  OST_END = 4,		/* Number of object size types.  */
};
54 
55 static tree compute_object_offset (const_tree, const_tree);
56 static bool addr_object_size (struct object_size_info *,
57 			      const_tree, int, unsigned HOST_WIDE_INT *);
58 static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
59 static tree pass_through_call (const gcall *);
60 static void collect_object_sizes_for (struct object_size_info *, tree);
61 static void expr_object_size (struct object_size_info *, tree, tree);
62 static bool merge_object_sizes (struct object_size_info *, tree, tree,
63 				unsigned HOST_WIDE_INT);
64 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
65 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
66 static void init_offset_limit (void);
67 static void check_for_plus_in_loops (struct object_size_info *, tree);
68 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
69 				       unsigned int);
70 
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA_NAME_VERSION.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[OST_END];

/* Bitmaps of SSA name versions whose object sizes have already been
   computed, one bitmap per object size type.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
84 
85 static inline unsigned HOST_WIDE_INT
unknown(int object_size_type)86 unknown (int object_size_type)
87 {
88   return ((unsigned HOST_WIDE_INT) -((object_size_type >> 1) ^ 1));
89 }
90 
91 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names.  */
92 
93 static inline void
object_sizes_grow(int object_size_type)94 object_sizes_grow (int object_size_type)
95 {
96   if (num_ssa_names > object_sizes[object_size_type].length ())
97     object_sizes[object_size_type].safe_grow (num_ssa_names, true);
98 }
99 
/* Free the memory held by object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}
107 
108 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown.  */
109 
110 static inline bool
object_sizes_unknown_p(int object_size_type,unsigned varno)111 object_sizes_unknown_p (int object_size_type, unsigned varno)
112 {
113   return (object_sizes[object_size_type][varno]
114 	  == unknown (object_size_type));
115 }
116 
/* Return the size recorded for VARNO under OSI's object size type.  */

static inline unsigned HOST_WIDE_INT
object_sizes_get (struct object_size_info *osi, unsigned varno)
{
  return object_sizes[osi->object_size_type][varno];
}
124 
/* Set size for VARNO corresponding to OSI to VAL unconditionally.
   Always returns true, so callers can chain it in boolean context.  */

static inline bool
object_sizes_set_force (struct object_size_info *osi, unsigned varno,
			unsigned HOST_WIDE_INT val)
{
  object_sizes[osi->object_size_type][varno] = val;
  return true;
}
134 
135 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
136    maximum.  */
137 
138 static inline bool
object_sizes_set(struct object_size_info * osi,unsigned varno,unsigned HOST_WIDE_INT val)139 object_sizes_set (struct object_size_info *osi, unsigned varno,
140 		  unsigned HOST_WIDE_INT val)
141 {
142   int object_size_type = osi->object_size_type;
143   if ((object_size_type & OST_MINIMUM) == 0)
144     {
145       if (object_sizes[object_size_type][varno] < val)
146 	return object_sizes_set_force (osi, varno, val);
147     }
148   else
149     {
150       if (object_sizes[object_size_type][varno] > val)
151 	return object_sizes_set_force (osi, varno, val);
152     }
153   return false;
154 }
155 
156 /* Initialize OFFSET_LIMIT variable.  */
157 static void
init_offset_limit(void)158 init_offset_limit (void)
159 {
160   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
161     offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
162   else
163     offset_limit = -1;
164   offset_limit /= 2;
165 }
166 
167 
/* Compute the byte offset of EXPR within VAR, where VAR is an ancestor
   of EXPR in the reference chain.  Return error_mark_node if unknown.
   The result may be built with MINUS_EXPR for negative array indices.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  /* Reached the outer object itself: offset is zero.  */
  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is DECL_FIELD_OFFSET plus the byte portion of
	 DECL_FIELD_BIT_OFFSET.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part lives right after the real part, i.e. at
	 an offset of one component size.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      /* Rebase the index so it is zero based.  */
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      /* For a constant negative index, negate it and switch to
	 MINUS_EXPR so the sizetype multiplication below stays
	 non-negative.  */
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      /* Only &ptr->field style MEM_REFs are expected here.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
237 
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  /* Only a struct whose last member is a flexible array can have its
     size affected by the initializer.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  /* A flexible array member is an array type with no TYPE_SIZE.  */
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
274 
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   On success store the size in *PSIZE and return true; otherwise leave
   *PSIZE set to unknown (object_size_type) and return false.  OSI may
   be NULL when called outside the main propagation (pass) machinery.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  /* Strip handled components (field/array refs etc.) to find the
     outermost object PT_VAR that PTR points into.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* Without OSI, for a subobject query, or a non-SSA base, fall
	 back to a fresh whole-object computation on the base pointer.  */
      if (!osi || (object_size_type & OST_SUBOBJECT) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~OST_SUBOBJECT, &sz);
	}
      else
	{
	  /* Otherwise reuse (or trigger) this pass's result for the
	     base SSA name.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
	  else
	    sz = unknown (object_size_type);
	}
      if (sz != unknown (object_size_type))
	{
	  /* Subtract the MEM_REF offset; clamp at zero if it points
	     past the end, give up on non-constant or non-UHWI
	     results.  */
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown (object_size_type);
	    }
	  else
	    sz = unknown (object_size_type);
	}

      if (sz != unknown (object_size_type) && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      pt_var_size = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
      if (!pt_var_size)
	return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
	  || tree_to_uhwi (pt_var_size) >= offset_limit)
	return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & OST_SUBOBJECT)
	{
	  /* Walk inward to the innermost reference whose type can
	     serve as the subobject.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  /* For &a[i] the subobject is the whole array, not one
	     element.  */
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* If the subobject's size is unusable or exceeds the whole
	     object's size, fall back to the whole object.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    /* An index provably below the array's maximum means
		       the access cannot be the trailing-array idiom.  */
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip enclosing (qualified) unions.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    /* If the field has a following FIELD_DECL it isn't
		       the trailing member, so the idiom can't apply.  */
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    /* Skip enclosing (qualified) unions again.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* V == PT_VAR means a trailing field: use the whole
		 object instead of the subobject.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus PTR's offset within it,
	 clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For MEM_REF bases also bound the result by the remaining bytes
	 of the whole object.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
513 
514 
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown (object_size_type).  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  /* Prefer the callee decl's type (which carries the attribute for
     declared functions); fall back to the call's function type.  */
  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      /* Attribute argument positions are 1-based; convert to 0-based.  */
      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Give up unless the referenced arguments exist and are constants.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown (object_size_type);

  /* A two-position alloc_size is a calloc-style (count, size) pair.  */
  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown (object_size_type);
}
567 
568 
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  /* The return flags encode which argument (if any) the call is known
     to return.  */
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
590 
591 
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  /* The address of a declared object can be handled directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  /* Everything else must be a pointer SSA name.  */
  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
      return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  /* Iterate pass 2 to a fixed point.  */
	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has converged; mark
	 it as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "",
			 object_sizes_get (&osi, i));
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return *psize != unknown (object_size_type);
}
766 
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  /* Look through WITH_SIZE_EXPR to the underlying value.  */
  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  /* Only the address of an object yields a usable size here.  */
  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown (object_size_type);

  object_sizes_set (osi, varno, bytes);
}
793 
794 
795 /* Compute object_sizes for PTR, defined to the result of a call.  */
796 
797 static void
call_object_size(struct object_size_info * osi,tree ptr,gcall * call)798 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
799 {
800   int object_size_type = osi->object_size_type;
801   unsigned int varno = SSA_NAME_VERSION (ptr);
802   unsigned HOST_WIDE_INT bytes;
803 
804   gcc_assert (is_gimple_call (call));
805 
806   gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
807   gcc_assert (osi->pass == 0);
808 
809   bytes = alloc_object_size (call, object_size_type);
810 
811   object_sizes_set (osi, varno, bytes);
812 }
813 
814 
815 /* Compute object_sizes for PTR, defined to an unknown value.  */
816 
817 static void
unknown_object_size(struct object_size_info * osi,tree ptr)818 unknown_object_size (struct object_size_info *osi, tree ptr)
819 {
820   int object_size_type = osi->object_size_type;
821   unsigned int varno = SSA_NAME_VERSION (ptr);
822 
823   gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
824   gcc_checking_assert (osi->pass == 0);
825 
826   object_sizes_set (osi, varno, unknown (object_size_type));
827 }
828 
829 
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (ORIG is part of a
   dependency cycle).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Already unknown; nothing to merge.  */
  if (object_sizes_unknown_p (object_size_type, varno))
    return false;
  /* Offsets at or above offset_limit are treated as pointer
     subtraction, so the resulting size is unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes_set (osi, varno, unknown (object_size_type));
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract the offset from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  if (orig_bytes != unknown (object_size_type))
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if (object_sizes_set (osi, varno, orig_bytes))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
862 
863 
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination  later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  /* Extract the pointer operand and the offset, either from a
     POINTER_PLUS_EXPR or from an equivalent &MEM_REF form.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown (object_size_type);
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Propagate from the base pointer's (possibly cyclic) size.  */
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  Subtract the offset from the
	     object's size, clamping at zero; offsets beyond
	     offset_limit count as subtraction, hence unknown.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown (object_size_type))
	    ;
	  else if (off > offset_limit)
	    bytes = unknown (object_size_type);
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown (object_size_type);

  object_sizes_set (osi, varno, bytes);
  return false;
}
925 
926 
927 /* Compute object_sizes for VAR, defined at STMT, which is
928    a COND_EXPR.  Return true if the object size might need reexamination
929    later.  */
930 
931 static bool
cond_expr_object_size(struct object_size_info * osi,tree var,gimple * stmt)932 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
933 {
934   tree then_, else_;
935   int object_size_type = osi->object_size_type;
936   unsigned int varno = SSA_NAME_VERSION (var);
937   bool reexamine = false;
938 
939   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
940 
941   if (object_sizes_unknown_p (object_size_type, varno))
942     return false;
943 
944   then_ = gimple_assign_rhs2 (stmt);
945   else_ = gimple_assign_rhs3 (stmt);
946 
947   if (TREE_CODE (then_) == SSA_NAME)
948     reexamine |= merge_object_sizes (osi, var, then_, 0);
949   else
950     expr_object_size (osi, var, then_);
951 
952   if (object_sizes_unknown_p (object_size_type, varno))
953     return reexamine;
954 
955   if (TREE_CODE (else_) == SSA_NAME)
956     reexamine |= merge_object_sizes (osi, var, else_, 0);
957   else
958     expr_object_size (osi, var, else_);
959 
960   return reexamine;
961 }
962 
963 /* Compute object sizes for VAR.
964    For ADDR_EXPR an object size is the number of remaining bytes
965    to the end of the object (where what is considered an object depends on
966    OSI->object_size_type).
967    For allocation GIMPLE_CALL like malloc or calloc object size is the size
968    of the allocation.
969    For POINTER_PLUS_EXPR where second operand is a constant integer,
970    object size is object size of the first operand minus the constant.
971    If the constant is bigger than the number of remaining bytes until the
972    end of the object, object size is 0, but if it is instead a pointer
973    subtraction, object size is unknown (object_size_type).
974    To differentiate addition from subtraction, ADDR_EXPR returns
975    unknown (object_size_type) for all objects bigger than half of the address
976    space, and constants less than half of the address space are considered
977    addition, while bigger constants subtraction.
978    For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
979    object size is object size of that argument.
980    Otherwise, object size is the maximum of object sizes of variables
981    that it might be set to.  */
982 
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Nothing to do if the size of VAR has already been finalized.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  Flipping the OST_MINIMUM bit
	     selects unknown () of the complementary mode, i.e. the identity
	     value for this mode's size merging.  */
	  object_sizes_set_force (osi, varno,
				  unknown (object_size_type ^ OST_MINIMUM));
	}
      else
	{
	  /* bitmap_set_bit returned false, so VAR was already visited on
	     this walk: we followed a use-def cycle back to it.  Found a
	     dependency loop.  Mark the variable for later re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	/* Pointer arithmetic: either PTR p+ OFF or the equivalent
	   &MEM_REF [ptr, off] form.  */
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
	    /* Plain copies and no-op conversions: forward to the RHS.  */
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
	  unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* For calls that return one of their arguments (memcpy-like),
	   use that argument's size; otherwise derive the size from the
	   call itself (e.g. malloc/calloc allocations).  */
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes_set (osi, varno, unknown (object_size_type));
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	/* Default definition of a parameter: derive the size from the
	   PARM_DECL itself.  */
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes_set (osi, varno, unknown (object_size_type));
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge the sizes of all PHI arguments.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once the size is unknown, further merging cannot change
	       the result.  */
	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      /* The result is final: record it and drop any pending
	 re-examination request.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      /* Some dependency could not be resolved yet; revisit VAR in a
	 later pass.  */
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1123 
1124 
1125 /* Helper function for check_for_plus_in_loops.  Called recursively
1126    to detect loops.  */
1127 
static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  /* DEPTH counts the non-zero pointer additions seen on the current
     use-def walk; osi->depths[varno] records the depth at which VAR was
     first entered (non-zero means VAR is on the active walk).  */
  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition: we reached VAR again
	     at a different depth, so some cycle through it keeps adding to
	     the pointer.  Force the size of every SSA name on the stack
	     down to VAR to zero and mark them as finally computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set_force (osi, *sp, 0);
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names still awaiting re-examination can participate in a
       size-affecting loop; everything else is already final.  */
    return;

  /* Push VAR onto the walk stack and recurse into its definition.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
	    /* Copies and no-op conversions add nothing: keep the same
	       depth.  */
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Only the addition of a non-zero constant increases the
	       depth.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* Follow memcpy-like calls that return one of their arguments.  */
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Walk every incoming SSA argument of the PHI at the same
	   depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1223 
1224 
1225 /* Check if some pointer we are computing object size of is being increased
1226    within a loop.  If yes, assume all the SSA variables participating in
1227    that loop have minimum object sizes 0.  */
1228 
1229 static void
check_for_plus_in_loops(struct object_size_info * osi,tree var)1230 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1231 {
1232   gimple *stmt = SSA_NAME_DEF_STMT (var);
1233 
1234   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1235      and looked for a POINTER_PLUS_EXPR in the pass-through
1236      argument, if any.  In GIMPLE, however, such an expression
1237      is not a valid call operand.  */
1238 
1239   if (is_gimple_assign (stmt)
1240       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1241     {
1242       tree basevar = gimple_assign_rhs1 (stmt);
1243       tree cst = gimple_assign_rhs2 (stmt);
1244 
1245       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1246 
1247       if (integer_zerop (cst))
1248         return;
1249 
1250       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1251       *osi->tos++ = SSA_NAME_VERSION (basevar);
1252       check_for_plus_in_loops_1 (osi, var, 2);
1253       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1254       osi->tos--;
1255     }
1256 }
1257 
1258 
1259 /* Initialize data structures for the object size computation.  */
1260 
1261 void
init_object_sizes(void)1262 init_object_sizes (void)
1263 {
1264   int object_size_type;
1265 
1266   if (computed[0])
1267     return;
1268 
1269   for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1270     {
1271       object_sizes_grow (object_size_type);
1272       computed[object_size_type] = BITMAP_ALLOC (NULL);
1273     }
1274 
1275   init_offset_limit ();
1276 }
1277 
1278 
1279 /* Destroy data structures after the object size computation.  */
1280 
1281 void
fini_object_sizes(void)1282 fini_object_sizes (void)
1283 {
1284   int object_size_type;
1285 
1286   for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
1287     {
1288       object_sizes_release (object_size_type);
1289       BITMAP_FREE (computed[object_size_type]);
1290     }
1291 }
1292 
1293 /* Dummy valueize function.  */
1294 
static tree
do_valueize (tree t)
{
  /* Identity valueization: hand every operand back unchanged so
     gimple_fold_stmt_to_constant folds the statement as written.  */
  return t;
}
1300 
/* Shared worker for both objsz passes.  Walk all statements of FUN and
   fold __builtin_object_size calls.  If INSERT_MIN_MAX_P (the
   early_objsz variant), only subobject queries are handled, and the
   call is kept with its result clamped by the computed constant via
   MIN_EXPR/MAX_EXPR instead of being folded away.  Returns 0 (no TODO
   flags).  */

static unsigned int
object_sizes_execute (function *fun, bool insert_min_max_p)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* A call whose result is unused needs no folding.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Lazily set up the size tables (no-op if already done).  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  if ((object_size_type & OST_SUBOBJECT)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME))
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call's result into a fresh SSA
			     name and emit LHS = MIN/MAX (call, bytes):
			     MAX_EXPR for minimum object size queries,
			     MIN_EXPR for maximum ones.  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = (object_size_type & OST_MINIMUM
			       ? MAX_EXPR : MIN_EXPR);
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  /* Try to fold the whole call to a constant.  */
	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      /* Folding failed: fall back to the failure values, 0 for
		 minimum object size and (size_t) -1 for maximum.  */
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      /* An out-of-range second argument: leave the call alone.  */
	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1401 
1402 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1403 
namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The main objsz pass: folds __builtin_object_size calls to constants
   (object_sizes_execute with insert_min_max_p == false).  */

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
1441 
1442 /* Early version of pass to optimize all __builtin_object_size () builtins.  */
1443 
namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Early variant: runs object_sizes_execute with insert_min_max_p == true,
   so __builtin_object_size calls are kept and their result is combined
   with the computed constant via MIN_EXPR/MAX_EXPR.  */

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}
1480