1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2014 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tree-object-size.h"
27 #include "diagnostic-core.h"
28 #include "gimple-pretty-print.h"
29 #include "bitmap.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "gimple-expr.h"
35 #include "is-a.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimple-ssa.h"
39 #include "stringpool.h"
40 #include "tree-ssanames.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-propagate.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 
/* State shared across one object-size computation.  */
struct object_size_info
{
  /* Second argument of __builtin_object_size: bit 1 set means minimum
     instead of maximum, bit 0 set means subobject instead of object.  */
  int object_size_type;
  /* SSA name versions already visited, resp. found in dependency
     cycles and therefore needing re-examination.  */
  bitmap visited, reexamine;
  /* Which pass is running: 0 (initial UD walk), 1 (loop detection for
     minimum sizes) or 2 (fixpoint iteration).  */
  int pass;
  /* Set when a size was changed during pass 2; keeps iterating.  */
  bool changed;
  /* Scratch arrays used only by check_for_plus_in_loops{,_1}.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
55 
/* Sentinel returned when a size cannot be determined: the maximum
   representable value for maximum-size queries (types 0 and 1), zero
   for minimum-size queries (types 2 and 3).  */
static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA name version.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
88 
89 
90 /* Initialize OFFSET_LIMIT variable.  */
91 static void
init_offset_limit(void)92 init_offset_limit (void)
93 {
94   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
95     offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
96   else
97     offset_limit = -1;
98   offset_limit /= 2;
99 }
100 
101 
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  /* CODE flips to MINUS_EXPR for negative constant array indexes.  */
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the field's byte offset.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the byte offset of the underlying object.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* Negative constant index: fold as a subtraction of its
	     absolute value so sizetype arithmetic stays unsigned.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      /* MEM[&obj, off]: the byte offset is the second operand.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
166 
167 
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip handled components to find the outermost referenced object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* Determine the size behind the MEM_REF base pointer, either by
	 a nested query (always as a whole-object query, hence &~1) or,
	 for SSA name bases during a pass, from the cached sizes.  */
      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset, clamping at zero and giving
	     up if the result doesn't fit in a uhwi.  */
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject query: find the innermost array or field whose
	     address is actually taken.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the whole object if the subobject's size is
	     unusable or larger than the containing object's size.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Not the last element: the subobject size
			       is trustworthy, stop the walk.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Look through any union members on the way up.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there any FIELD_DECL after this one?  If so
			   the array isn't trailing and the subobject
			   size can be used.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    /* Again look through union members.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* V == PT_VAR means a possibly-trailing array was found:
		 use the whole object instead of the subobject.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus PTR's offset within it,
	 clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For a MEM_REF base also bound the result by the bytes remaining
	 in the whole object.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
387 
388 
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indexes of the size argument(s); -1 means not present.
     The allocation size is arg1, or arg1 * arg2 when arg2 is set.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* First honor an explicit alloc_size attribute; its values are
     1-based argument positions.  */
  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Then the known allocation built-ins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
	/* fall through */
      default:
	break;
      }

  /* Only constant size arguments yield a usable size.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
451 
452 
453 /* If object size is propagated from one of function's arguments directly
454    to its return value, return that argument for GIMPLE_CALL statement CALL.
455    Otherwise return NULL.  */
456 
457 static tree
pass_through_call(const_gimple call)458 pass_through_call (const_gimple call)
459 {
460   tree callee = gimple_call_fndecl (call);
461 
462   if (callee
463       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
464     switch (DECL_FUNCTION_CODE (callee))
465       {
466       case BUILT_IN_MEMCPY:
467       case BUILT_IN_MEMMOVE:
468       case BUILT_IN_MEMSET:
469       case BUILT_IN_STRCPY:
470       case BUILT_IN_STRNCPY:
471       case BUILT_IN_STRCAT:
472       case BUILT_IN_STRNCAT:
473       case BUILT_IN_MEMCPY_CHK:
474       case BUILT_IN_MEMMOVE_CHK:
475       case BUILT_IN_MEMSET_CHK:
476       case BUILT_IN_STRCPY_CHK:
477       case BUILT_IN_STRNCPY_CHK:
478       case BUILT_IN_STPNCPY_CHK:
479       case BUILT_IN_STRCAT_CHK:
480       case BUILT_IN_STRNCAT_CHK:
481       case BUILT_IN_ASSUME_ALIGNED:
482 	if (gimple_call_num_args (call) >= 1)
483 	  return gimple_call_arg (call, 0);
484 	break;
485       default:
486 	break;
487       }
488 
489   return NULL_TREE;
490 }
491 
492 
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  /* A direct address can be answered without the SSA machinery.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (num_ssa_names > object_sizes[object_size_type].length ())
	    object_sizes[object_size_type].safe_grow (num_ssa_names);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      /* Iterate to a fixpoint; osi.changed is set by
		 merge_object_sizes whenever a size improves.  */
	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Everything that needed reexamination is now final too.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
630 
631 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
632 
633 static void
expr_object_size(struct object_size_info * osi,tree ptr,tree value)634 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
635 {
636   int object_size_type = osi->object_size_type;
637   unsigned int varno = SSA_NAME_VERSION (ptr);
638   unsigned HOST_WIDE_INT bytes;
639 
640   gcc_assert (object_sizes[object_size_type][varno]
641 	      != unknown[object_size_type]);
642   gcc_assert (osi->pass == 0);
643 
644   if (TREE_CODE (value) == WITH_SIZE_EXPR)
645     value = TREE_OPERAND (value, 0);
646 
647   /* Pointer variables should have been handled by merge_object_sizes.  */
648   gcc_assert (TREE_CODE (value) != SSA_NAME
649 	      || !POINTER_TYPE_P (TREE_TYPE (value)));
650 
651   if (TREE_CODE (value) == ADDR_EXPR)
652     bytes = addr_object_size (osi, value, object_size_type);
653   else
654     bytes = unknown[object_size_type];
655 
656   if ((object_size_type & 2) == 0)
657     {
658       if (object_sizes[object_size_type][varno] < bytes)
659 	object_sizes[object_size_type][varno] = bytes;
660     }
661   else
662     {
663       if (object_sizes[object_size_type][varno] > bytes)
664 	object_sizes[object_size_type][varno] = bytes;
665     }
666 }
667 
668 
669 /* Compute object_sizes for PTR, defined to the result of a call.  */
670 
671 static void
call_object_size(struct object_size_info * osi,tree ptr,gimple call)672 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
673 {
674   int object_size_type = osi->object_size_type;
675   unsigned int varno = SSA_NAME_VERSION (ptr);
676   unsigned HOST_WIDE_INT bytes;
677 
678   gcc_assert (is_gimple_call (call));
679 
680   gcc_assert (object_sizes[object_size_type][varno]
681 	      != unknown[object_size_type]);
682   gcc_assert (osi->pass == 0);
683 
684   bytes = alloc_object_size (call, object_size_type);
685 
686   if ((object_size_type & 2) == 0)
687     {
688       if (object_sizes[object_size_type][varno] < bytes)
689 	object_sizes[object_size_type][varno] = bytes;
690     }
691   else
692     {
693       if (object_sizes[object_size_type][varno] > bytes)
694 	object_sizes[object_size_type][varno] = bytes;
695     }
696 }
697 
698 
699 /* Compute object_sizes for PTR, defined to an unknown value.  */
700 
701 static void
unknown_object_size(struct object_size_info * osi,tree ptr)702 unknown_object_size (struct object_size_info *osi, tree ptr)
703 {
704   int object_size_type = osi->object_size_type;
705   unsigned int varno = SSA_NAME_VERSION (ptr);
706   unsigned HOST_WIDE_INT bytes;
707 
708   gcc_assert (object_sizes[object_size_type][varno]
709 	      != unknown[object_size_type]);
710   gcc_assert (osi->pass == 0);
711 
712   bytes = unknown[object_size_type];
713 
714   if ((object_size_type & 2) == 0)
715     {
716       if (object_sizes[object_size_type][varno] < bytes)
717 	object_sizes[object_size_type][varno] = bytes;
718     }
719   else
720     {
721       if (object_sizes[object_size_type][varno] > bytes)
722 	object_sizes[object_size_type][varno] = bytes;
723     }
724 }
725 
726 
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Nothing can improve once DEST is known to be unknown.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets of at least OFFSET_LIMIT are treated as pointer
     subtraction, making the size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Remaining bytes of ORIG after advancing by OFFSET, clamped at 0.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Grow for maximum-size queries, shrink for minimum-size queries;
     record any change so pass 2 keeps iterating to a fixpoint.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  /* DEST needs reexamination iff ORIG itself is still unsettled.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
772 
773 
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination  later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  /* Extract base pointer and byte offset from either VAR = op0 + op1
     or VAR = &MEM[op0, op1].  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    /* Too big an offset is really a pointer subtraction.  */
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
844 
845 
846 /* Compute object_sizes for VAR, defined at STMT, which is
847    a COND_EXPR.  Return true if the object size might need reexamination
848    later.  */
849 
850 static bool
cond_expr_object_size(struct object_size_info * osi,tree var,gimple stmt)851 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
852 {
853   tree then_, else_;
854   int object_size_type = osi->object_size_type;
855   unsigned int varno = SSA_NAME_VERSION (var);
856   bool reexamine = false;
857 
858   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
859 
860   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
861     return false;
862 
863   then_ = gimple_assign_rhs2 (stmt);
864   else_ = gimple_assign_rhs3 (stmt);
865 
866   if (TREE_CODE (then_) == SSA_NAME)
867     reexamine |= merge_object_sizes (osi, var, then_, 0);
868   else
869     expr_object_size (osi, var, then_);
870 
871   if (TREE_CODE (else_) == SSA_NAME)
872     reexamine |= merge_object_sizes (osi, var, else_, 0);
873   else
874     expr_object_size (osi, var, else_);
875 
876   return reexamine;
877 }
878 
879 /* Compute object sizes for VAR.
880    For ADDR_EXPR an object size is the number of remaining bytes
881    to the end of the object (where what is considered an object depends on
882    OSI->object_size_type).
883    For allocation GIMPLE_CALL like malloc or calloc object size is the size
884    of the allocation.
885    For POINTER_PLUS_EXPR where second operand is a constant integer,
886    object size is object size of the first operand minus the constant.
887    If the constant is bigger than the number of remaining bytes until the
888    end of the object, object size is 0, but if it is instead a pointer
889    subtraction, object size is unknown[object_size_type].
890    To differentiate addition from subtraction, ADDR_EXPR returns
891    unknown[object_size_type] for all objects bigger than half of the address
892    space, and constants less than half of the address space are considered
893    addition, while bigger constants subtraction.
894    For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
895    object size is object size of that argument.
896    Otherwise, object size is the maximum of object sizes of variables
897    that it might be set to.  */
898 
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already fully computed in an earlier walk?  Nothing to do.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit in pass 0: seed with the identity of the merge
	     operation -- 0 for the maximum-size modes, -1 for the
	     minimum-size modes (object_size_type bit 2 set).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	/* Pointer arithmetic (including taking the address of a
	   MEM_REF) adjusts the size of the base object by the offset.  */
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
	    /* Plain copies and no-op conversions forward the size of
	       the right-hand side pointer.  */
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
	/* A memcpy-like call that always returns one of its arguments
	   has the object size of that argument; otherwise derive the
	   size from the call itself (e.g. an allocation).  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge the sizes of all PHI arguments; once the running value
	   hits "unknown" no further argument can change it.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* If no dependency needs re-examination, or the answer is already
     "unknown" (which no later merge can improve), the result is final;
     otherwise queue VAR for the fixed-point passes.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1038 
1039 
1040 /* Helper function for check_for_plus_in_loops.  Called recursively
1041    to detect loops.  */
1042 
static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the current DFS walk.  If it was reached at a
	 different depth, at least one nonzero POINTER_PLUS_EXPR lies on
	 the cycle: the pointer grows inside a loop.  Collapse the object
	 sizes of every stack entry down to VAR to 0 and mark them
	 computed so the fixed-point passes leave them alone.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names still awaiting re-examination can participate in a
       problematic loop; anything already settled is skipped.  */
    return;

  /* Push VAR onto the walk: record its depth and stack slot.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	/* Plain copies and no-op conversions do not change the offset,
	   so recurse at the same depth.  */
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero constant addition bumps the depth, which is how
	       a growing-pointer cycle is later detected above.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
	/* Pass-through calls forward one argument unchanged; follow it
	   at the same depth.  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Follow every SSA argument of the PHI at the same depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1137 
1138 
1139 /* Check if some pointer we are computing object size of is being increased
1140    within a loop.  If yes, assume all the SSA variables participating in
1141    that loop have minimum object sizes 0.  */
1142 
1143 static void
check_for_plus_in_loops(struct object_size_info * osi,tree var)1144 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1145 {
1146   gimple stmt = SSA_NAME_DEF_STMT (var);
1147 
1148   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1149      and looked for a POINTER_PLUS_EXPR in the pass-through
1150      argument, if any.  In GIMPLE, however, such an expression
1151      is not a valid call operand.  */
1152 
1153   if (is_gimple_assign (stmt)
1154       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1155     {
1156       tree basevar = gimple_assign_rhs1 (stmt);
1157       tree cst = gimple_assign_rhs2 (stmt);
1158 
1159       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1160 
1161       if (integer_zerop (cst))
1162         return;
1163 
1164       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1165       *osi->tos++ = SSA_NAME_VERSION (basevar);
1166       check_for_plus_in_loops_1 (osi, var, 2);
1167       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1168       osi->tos--;
1169     }
1170 }
1171 
1172 
1173 /* Initialize data structures for the object size computation.  */
1174 
1175 void
init_object_sizes(void)1176 init_object_sizes (void)
1177 {
1178   int object_size_type;
1179 
1180   if (computed[0])
1181     return;
1182 
1183   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1184     {
1185       object_sizes[object_size_type].safe_grow (num_ssa_names);
1186       computed[object_size_type] = BITMAP_ALLOC (NULL);
1187     }
1188 
1189   init_offset_limit ();
1190 }
1191 
1192 
1193 /* Destroy data structures after the object size computation.  */
1194 
1195 static void
fini_object_sizes(void)1196 fini_object_sizes (void)
1197 {
1198   int object_size_type;
1199 
1200   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1201     {
1202       object_sizes[object_size_type].release ();
1203       BITMAP_FREE (computed[object_size_type]);
1204     }
1205 }
1206 
1207 
1208 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1209 
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple call = gsi_stmt (i);
	  /* Only __builtin_object_size calls are interesting.  */
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily set up the size tables; init_object_sizes returns
	     immediately when already initialized.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* The size could not be determined.  Substitute the
		 documented fallback constants: (size_t) -1 for the
		 maximum modes (0 and 1), 0 for the minimum modes
		 (2 and 3) -- matching the unknown[] table.  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (tree_fits_uhwi_p (ost))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_to_uhwi (ost);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);
		    }
		}

	      /* Malformed call (wrong arity, non-pointer argument or
		 non-constant mode): leave it untouched.  */
	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* A call whose result is unused needs no propagation.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Propagate into all uses and fold those stmts.  */
	  gimple use_stmt;
	  imm_use_iterator iter;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      use_operand_p use_p;
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, result);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      fold_stmt (&gsi);
	      update_stmt (gsi_stmt (gsi));
	    }
	}
    }

  /* Free all per-function tables before returning to the pass manager.  */
  fini_object_sizes ();
  return 0;
}
1283 
namespace {

/* Descriptor for the "objsz" pass: a GIMPLE pass with no gate that
   requires CFG and SSA form and verifies SSA afterwards.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

/* Pass wrapper whose execute method runs compute_object_sizes on the
   current function.  */
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace
1315 
1316 gimple_opt_pass *
make_pass_object_sizes(gcc::context * ctxt)1317 make_pass_object_sizes (gcc::context *ctxt)
1318 {
1319   return new pass_object_sizes (ctxt);
1320 }
1321