1 /* Callgraph based analysis of static variables.
2    Copyright (C) 2004-2018 Free Software Foundation, Inc.
3    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This file marks functions as being either const (TREE_READONLY) or
22    pure (DECL_PURE_P).  It can also set a variant of these that
23    are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
24 
25    This must be run after inlining decisions have been made since
26    otherwise, the local sets will not contain information that is
27    consistent with post inlined state.  The global sets are not prone
28    to this problem since they are by definition transitive.  */
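
/* For illustration (example user code, not part of this file):

     int sq (int x) { return x * x; }

   depends only on its arguments and may become TREE_READONLY (const);

     extern int g;
     int get_g (void) { return g; }

   reads but does not write global memory and may become DECL_PURE_P
   (pure); a function that writes g is neither.  */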
29 
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
33 
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
64 
65 /* Lattice values for const and pure functions.  Everything starts out
66    being const, then may drop to pure and then neither depending on
67    what is found.  */
68 enum pure_const_state_e
69 {
70   IPA_CONST,
71   IPA_PURE,
72   IPA_NEITHER
73 };
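
/* The numeric ordering of the lattice matters: propagation merges two
   states by taking their MAX (see worse_state below), so for example
   merging IPA_CONST with IPA_PURE yields IPA_PURE.  */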
74 
75 static const char *pure_const_names[3] = {"const", "pure", "neither"};
76 
77 enum malloc_state_e
78 {
79   STATE_MALLOC_TOP,
80   STATE_MALLOC,
81   STATE_MALLOC_BOTTOM
82 };
83 
84 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
85 
86 /* Holder for the const_state.  There is one of these per function
87    decl.  */
88 struct funct_state_d
89 {
90   /* See above.  */
91   enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
93   enum pure_const_state_e state_previously_known;
94   bool looping_previously_known;
95 
  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
102   bool looping;
103 
104   bool can_throw;
105 
106   /* If function can call free, munmap or otherwise make previously
107      non-trapping memory accesses trapping.  */
108   bool can_free;
109 
110   enum malloc_state_e malloc_state;
111 };
112 
/* State used when we know nothing about a function.  */
114 static struct funct_state_d varying_state
115    = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };
116 
117 
118 typedef struct funct_state_d * funct_state;
119 
120 /* The storage of the funct_state is abstracted because there is the
121    possibility that it may be desirable to move this to the cgraph
122    local info.  */
123 
124 /* Array, indexed by cgraph node uid, of function states.  */
125 
126 static vec<funct_state> funct_state_vec;
127 
128 static bool gate_pure_const (void);
129 
130 namespace {
131 
132 const pass_data pass_data_ipa_pure_const =
133 {
134   IPA_PASS, /* type */
135   "pure-const", /* name */
136   OPTGROUP_NONE, /* optinfo_flags */
137   TV_IPA_PURE_CONST, /* tv_id */
138   0, /* properties_required */
139   0, /* properties_provided */
140   0, /* properties_destroyed */
141   0, /* todo_flags_start */
142   0, /* todo_flags_finish */
143 };
144 
145 class pass_ipa_pure_const : public ipa_opt_pass_d
146 {
147 public:
148   pass_ipa_pure_const(gcc::context *ctxt);
149 
150   /* opt_pass methods: */
151   bool gate (function *) { return gate_pure_const (); }
152   unsigned int execute (function *fun);
153 
154   void register_hooks (void);
155 
156 private:
157   bool init_p;
158 
159   /* Holders of ipa cgraph hooks: */
160   struct cgraph_node_hook_list *function_insertion_hook_holder;
161   struct cgraph_2node_hook_list *node_duplication_hook_holder;
162   struct cgraph_node_hook_list *node_removal_hook_holder;
163 
164 }; // class pass_ipa_pure_const
165 
166 } // anon namespace
167 
/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */
171 
172 static bool
173 function_always_visible_to_compiler_p (tree decl)
174 {
175   return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
176 	  || DECL_COMDAT (decl));
177 }
178 
/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */
184 
185 static hash_set<tree> *
186 suggest_attribute (int option, tree decl, bool known_finite,
187 		   hash_set<tree> *warned_about,
188 		   const char * attrib_name)
189 {
190   if (!option_enabled (option, &global_options))
191     return warned_about;
192   if (TREE_THIS_VOLATILE (decl)
193       || (known_finite && function_always_visible_to_compiler_p (decl)))
194     return warned_about;
195 
196   if (!warned_about)
197     warned_about = new hash_set<tree>;
198   if (warned_about->contains (decl))
199     return warned_about;
200   warned_about->add (decl);
201   warning_at (DECL_SOURCE_LOCATION (decl),
202 	      option,
203 	      known_finite
204 	      ? G_("function might be candidate for attribute %qs")
205 	      : G_("function might be candidate for attribute %qs"
206 		   " if it is known to return normally"), attrib_name);
207   return warned_about;
208 }
209 
/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
211    is true if the function is known to be finite.  */
212 
213 static void
214 warn_function_pure (tree decl, bool known_finite)
215 {
216   /* Declaring a void function pure makes no sense and is diagnosed
217      by -Wattributes because calling it would have no effect.  */
218   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
219     return;
220 
221   static hash_set<tree> *warned_about;
222   warned_about
223     = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
224 			 known_finite, warned_about, "pure");
225 }
226 
/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
228    is true if the function is known to be finite.  */
229 
230 static void
231 warn_function_const (tree decl, bool known_finite)
232 {
  /* Declaring a void function const makes no sense and is diagnosed
234      by -Wattributes because calling it would have no effect.  */
235   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
236     return;
237 
238   static hash_set<tree> *warned_about;
239   warned_about
240     = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
241 			 known_finite, warned_about, "const");
242 }
243 
/* Emit a suggestion about __attribute__((malloc)) for DECL.  */
245 
246 static void
247 warn_function_malloc (tree decl)
248 {
249   static hash_set<tree> *warned_about;
250   warned_about
251     = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
252 			 false, warned_about, "malloc");
253 }
254 
/* Emit a suggestion about __attribute__((noreturn)) for DECL.  */
256 
257 static void
258 warn_function_noreturn (tree decl)
259 {
260   tree original_decl = decl;
261 
262   cgraph_node *node = cgraph_node::get (decl);
263   if (node->instrumentation_clone)
264     decl = node->instrumented_version->decl;
265 
266   static hash_set<tree> *warned_about;
267   if (!lang_hooks.missing_noreturn_ok_p (decl)
268       && targetm.warn_func_return (decl))
269     warned_about
270       = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
271 			   true, warned_about, "noreturn");
272 }
273 
274 void
275 warn_function_cold (tree decl)
276 {
277   tree original_decl = decl;
278 
279   cgraph_node *node = cgraph_node::get (decl);
280   if (node->instrumentation_clone)
281     decl = node->instrumented_version->decl;
282 
283   static hash_set<tree> *warned_about;
284   warned_about
285     = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
286 			 true, warned_about, "cold");
287 }
288 
289 /* Return true if we have a function state for NODE.  */
290 
291 static inline bool
292 has_function_state (struct cgraph_node *node)
293 {
294   if (!funct_state_vec.exists ()
295       || funct_state_vec.length () <= (unsigned int)node->uid)
296     return false;
297   return funct_state_vec[node->uid] != NULL;
298 }
299 
300 /* Return the function state from NODE.  */
301 
302 static inline funct_state
303 get_function_state (struct cgraph_node *node)
304 {
305   if (!funct_state_vec.exists ()
306       || funct_state_vec.length () <= (unsigned int)node->uid
307       || !funct_state_vec[node->uid])
308     /* We might want to put correct previously_known state into varying.  */
309     return &varying_state;
  return funct_state_vec[node->uid];
311 }
312 
313 /* Set the function state S for NODE.  */
314 
315 static inline void
316 set_function_state (struct cgraph_node *node, funct_state s)
317 {
318   if (!funct_state_vec.exists ()
319       || funct_state_vec.length () <= (unsigned int)node->uid)
320      funct_state_vec.safe_grow_cleared (node->uid + 1);
321 
  /* If funct_state_vec already contains a funct_state, we have to release
     it before it is overwritten.  */
324   if (funct_state_vec[node->uid] != NULL
325       && funct_state_vec[node->uid] != &varying_state)
326     free (funct_state_vec[node->uid]);
327 
328   funct_state_vec[node->uid] = s;
329 }
330 
/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */
333 
334 static inline void
335 check_decl (funct_state local,
336 	    tree t, bool checking_write, bool ipa)
337 {
  /* We do not want to do anything with volatile except mark any
     function that uses one as neither const nor pure.  */
340   if (TREE_THIS_VOLATILE (t))
341     {
342       local->pure_const_state = IPA_NEITHER;
343       if (dump_file)
344         fprintf (dump_file, "    Volatile operand is not const/pure\n");
345       return;
346     }
347 
348   /* Do not care about a local automatic that is not static.  */
349   if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
350     return;
351 
  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
354   if (DECL_PRESERVE_P (t))
355     {
356       local->pure_const_state = IPA_NEITHER;
357       if (dump_file)
358         fprintf (dump_file, "    Used static/global variable is not const/pure\n");
359       return;
360     }
361 
362   /* In IPA mode we are not interested in checking actual loads and stores;
363      they will be processed at propagation time using ipa_ref.  */
364   if (ipa)
365     return;
366 
367   /* Since we have dealt with the locals and params cases above, if we
368      are CHECKING_WRITE, this cannot be a pure or constant
369      function.  */
370   if (checking_write)
371     {
372       local->pure_const_state = IPA_NEITHER;
373       if (dump_file)
374         fprintf (dump_file, "    static/global memory write is not const/pure\n");
375       return;
376     }
377 
378   if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
379     {
380       /* Readonly reads are safe.  */
381       if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
382 	return; /* Read of a constant, do not change the function state.  */
383       else
384 	{
385           if (dump_file)
386             fprintf (dump_file, "    global memory read is not const\n");
387 	  /* Just a regular read.  */
388 	  if (local->pure_const_state == IPA_CONST)
389 	    local->pure_const_state = IPA_PURE;
390 	}
391     }
392   else
393     {
394       /* Compilation level statics can be read if they are readonly
395 	 variables.  */
396       if (TREE_READONLY (t))
397 	return;
398 
399       if (dump_file)
400 	fprintf (dump_file, "    static memory read is not const\n");
401       /* Just a regular read.  */
402       if (local->pure_const_state == IPA_CONST)
403 	local->pure_const_state = IPA_PURE;
404     }
405 }
406 
407 
/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */
410 
411 static inline void
412 check_op (funct_state local, tree t, bool checking_write)
413 {
414   t = get_base_address (t);
415   if (t && TREE_THIS_VOLATILE (t))
416     {
417       local->pure_const_state = IPA_NEITHER;
418       if (dump_file)
419 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
420       return;
421     }
422   else if (t
423   	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
424 	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
425 	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
426     {
427       if (dump_file)
428 	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
429       return;
430     }
431   else if (checking_write)
432     {
433       local->pure_const_state = IPA_NEITHER;
434       if (dump_file)
435 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
436       return;
437     }
438   else
439     {
440       if (dump_file)
441 	fprintf (dump_file, "    Indirect ref read is not const\n");
442       if (local->pure_const_state == IPA_CONST)
443 	local->pure_const_state = IPA_PURE;
444     }
445 }
446 
/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
448 
449 static void
450 state_from_flags (enum pure_const_state_e *state, bool *looping,
451 	          int flags, bool cannot_lead_to_return)
452 {
453   *looping = false;
454   if (flags & ECF_LOOPING_CONST_OR_PURE)
455     {
456       *looping = true;
457       if (dump_file && (dump_flags & TDF_DETAILS))
458 	fprintf (dump_file, " looping\n");
459     }
460   if (flags & ECF_CONST)
461     {
462       *state = IPA_CONST;
463       if (dump_file && (dump_flags & TDF_DETAILS))
464 	fprintf (dump_file, " const\n");
465     }
466   else if (flags & ECF_PURE)
467     {
468       *state = IPA_PURE;
469       if (dump_file && (dump_flags & TDF_DETAILS))
470 	fprintf (dump_file, " pure\n");
471     }
472   else if (cannot_lead_to_return)
473     {
474       *state = IPA_PURE;
475       *looping = true;
476       if (dump_file && (dump_flags & TDF_DETAILS))
477 	fprintf (dump_file, " ignoring side effects->pure looping\n");
478     }
479   else
480     {
481       if (dump_file && (dump_flags & TDF_DETAILS))
482 	fprintf (dump_file, " neither\n");
483       *state = IPA_NEITHER;
484       *looping = true;
485     }
486 }
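
/* For example, ECF_CONST | ECF_LOOPING_CONST_OR_PURE yields IPA_CONST
   with *LOOPING set, while flags with neither ECF_CONST nor ECF_PURE
   on a call that cannot lead to return are treated as a looping pure
   call, since its side effects can never be observed.  */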
487 
/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the better
   of the two variants into STATE and LOOPING.  Be sure to merge looping
   correctly: IPA_NEITHER functions have looping 0 even if they do not
   have to return.  */
492 
493 static inline void
494 better_state (enum pure_const_state_e *state, bool *looping,
495 	      enum pure_const_state_e state2, bool looping2)
496 {
497   if (state2 < *state)
498     {
499       if (*state == IPA_NEITHER)
500 	*looping = looping2;
501       else
502 	*looping = MIN (*looping, looping2);
503       *state = state2;
504     }
505   else if (state2 != IPA_NEITHER)
506     *looping = MIN (*looping, looping2);
507 }
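
/* As a worked example of the merge above, combining a body state of
   (IPA_PURE, looping) with a previously known (IPA_CONST, !looping)
   yields (IPA_CONST, !looping): the better state wins and looping is
   the MIN of the two as long as neither state is IPA_NEITHER.  */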
508 
/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the worse
   of the two variants into STATE and LOOPING.  FROM and TO are the
   caller and callee of the call being accounted for; either may be
   NULL when unknown.  */
512 
513 static inline void
514 worse_state (enum pure_const_state_e *state, bool *looping,
515 	     enum pure_const_state_e state2, bool looping2,
516 	     struct symtab_node *from,
517 	     struct symtab_node *to)
518 {
519   /* Consider function:
520 
521      bool a(int *p)
522      {
523        return *p==*p;
524      }
525 
526      During early optimization we will turn this into:
527 
528      bool a(int *p)
529      {
530        return true;
531      }
532 
     Now this function will be detected as CONST; however, when interposed,
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
536   if (*state == IPA_CONST && state2 == IPA_CONST
537       && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
538     {
539       if (dump_file && (dump_flags & TDF_DETAILS))
540 	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
541 		 "bind to current def.\n", to->name ());
542       state2 = IPA_PURE;
543     }
544   *state = MAX (*state, state2);
545   *looping = MAX (*looping, looping2);
546 }
547 
/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them still can be.  */
550 static bool
551 special_builtin_state (enum pure_const_state_e *state, bool *looping,
552 			tree callee)
553 {
554   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
555     switch (DECL_FUNCTION_CODE (callee))
556       {
557 	case BUILT_IN_RETURN:
558 	case BUILT_IN_UNREACHABLE:
559 	CASE_BUILT_IN_ALLOCA:
560 	case BUILT_IN_STACK_SAVE:
561 	case BUILT_IN_STACK_RESTORE:
562 	case BUILT_IN_EH_POINTER:
563 	case BUILT_IN_EH_FILTER:
564 	case BUILT_IN_UNWIND_RESUME:
565 	case BUILT_IN_CXA_END_CLEANUP:
566 	case BUILT_IN_EH_COPY_VALUES:
567 	case BUILT_IN_FRAME_ADDRESS:
568 	case BUILT_IN_APPLY:
569 	case BUILT_IN_APPLY_ARGS:
570 	case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
571 	case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
572 	  *looping = false;
573 	  *state = IPA_CONST;
574 	  return true;
575 	case BUILT_IN_PREFETCH:
576 	  *looping = true;
577 	  *state = IPA_CONST;
578 	  return true;
579 	default:
580 	  break;
581       }
582   return false;
583 }
584 
/* Check the parameters of the call statement CALL to see if there are
   any references in the parameters that are not allowed for pure or
   const functions.  Also check to see if this is either an indirect
   call, a call outside the compilation unit, or a call with special
   attributes that may also affect the purity.  */
591 
592 static void
593 check_call (funct_state local, gcall *call, bool ipa)
594 {
595   int flags = gimple_call_flags (call);
596   tree callee_t = gimple_call_fndecl (call);
597   bool possibly_throws = stmt_could_throw_p (call);
598   bool possibly_throws_externally = (possibly_throws
599   				     && stmt_can_throw_external (call));
600 
601   if (possibly_throws)
602     {
603       unsigned int i;
604       for (i = 0; i < gimple_num_ops (call); i++)
605         if (gimple_op (call, i)
606 	    && tree_could_throw_p (gimple_op (call, i)))
607 	  {
608 	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
609 	      {
610 		if (dump_file)
611 		  fprintf (dump_file, "    operand can throw; looping\n");
612 		local->looping = true;
613 	      }
614 	    if (possibly_throws_externally)
615 	      {
616 		if (dump_file)
617 		  fprintf (dump_file, "    operand can throw externally\n");
618 		local->can_throw = true;
619 	      }
620 	  }
621     }
622 
623   /* The const and pure flags are set by a variety of places in the
624      compiler (including here).  If someone has already set the flags
625      for the callee, (such as for some of the builtins) we will use
626      them, otherwise we will compute our own information.
627 
628      Const and pure functions have less clobber effects than other
629      functions so we process these first.  Otherwise if it is a call
630      outside the compilation unit or an indirect call we punt.  This
631      leaves local calls which will be processed by following the call
632      graph.  */
633   if (callee_t)
634     {
635       enum pure_const_state_e call_state;
636       bool call_looping;
637 
638       if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
639 	  && !nonfreeing_call_p (call))
640 	local->can_free = true;
641 
642       if (special_builtin_state (&call_state, &call_looping, callee_t))
643 	{
644 	  worse_state (&local->pure_const_state, &local->looping,
645 		       call_state, call_looping,
646 		       NULL, NULL);
647 	  return;
648 	}
649       /* When bad things happen to bad functions, they cannot be const
650 	 or pure.  */
651       if (setjmp_call_p (callee_t))
652 	{
653 	  if (dump_file)
654 	    fprintf (dump_file, "    setjmp is not const/pure\n");
655           local->looping = true;
656 	  local->pure_const_state = IPA_NEITHER;
657 	}
658 
659       if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
660 	switch (DECL_FUNCTION_CODE (callee_t))
661 	  {
662 	  case BUILT_IN_LONGJMP:
663 	  case BUILT_IN_NONLOCAL_GOTO:
664 	    if (dump_file)
665 	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
666 	    local->pure_const_state = IPA_NEITHER;
667             local->looping = true;
668 	    break;
669 	  default:
670 	    break;
671 	  }
672     }
673   else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
674     local->can_free = true;
675 
676   /* When not in IPA mode, we can still handle self recursion.  */
677   if (!ipa && callee_t
678       && recursive_call_p (current_function_decl, callee_t))
679     {
680       if (dump_file)
681         fprintf (dump_file, "    Recursive call can loop.\n");
682       local->looping = true;
683     }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is a builtin) and process solely on the basis
     of those bits.  Handle internal calls always; those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
690   else if (!ipa || gimple_call_internal_p (call))
691     {
692       enum pure_const_state_e call_state;
693       bool call_looping;
694       if (possibly_throws && cfun->can_throw_non_call_exceptions)
695         {
696 	  if (dump_file)
697 	    fprintf (dump_file, "    can throw; looping\n");
698           local->looping = true;
699 	}
700       if (possibly_throws_externally)
701         {
702 	  if (dump_file)
703 	    {
704 	      fprintf (dump_file, "    can throw externally to lp %i\n",
705 	      	       lookup_stmt_eh_lp (call));
706 	      if (callee_t)
707 		fprintf (dump_file, "     callee:%s\n",
708 			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
709 	    }
710           local->can_throw = true;
711 	}
712       if (dump_file && (dump_flags & TDF_DETAILS))
713 	fprintf (dump_file, "    checking flags for call:");
714       state_from_flags (&call_state, &call_looping, flags,
715 			((flags & (ECF_NORETURN | ECF_NOTHROW))
716 			 == (ECF_NORETURN | ECF_NOTHROW))
717 			|| (!flag_exceptions && (flags & ECF_NORETURN)));
718       worse_state (&local->pure_const_state, &local->looping,
719 		   call_state, call_looping, NULL, NULL);
720     }
  /* Direct function calls are handled by IPA propagation.  */
722 }
723 
/* Wrapper around check_decl for loads in local mode.  */
725 
726 static bool
727 check_load (gimple *, tree op, tree, void *data)
728 {
729   if (DECL_P (op))
730     check_decl ((funct_state)data, op, false, false);
731   else
732     check_op ((funct_state)data, op, false);
733   return false;
734 }
735 
/* Wrapper around check_decl for stores in local mode.  */
737 
738 static bool
739 check_store (gimple *, tree op, tree, void *data)
740 {
741   if (DECL_P (op))
742     check_decl ((funct_state)data, op, true, false);
743   else
744     check_op ((funct_state)data, op, true);
745   return false;
746 }
747 
748 /* Wrapper around check_decl for loads in ipa mode.  */
749 
750 static bool
751 check_ipa_load (gimple *, tree op, tree, void *data)
752 {
753   if (DECL_P (op))
754     check_decl ((funct_state)data, op, false, true);
755   else
756     check_op ((funct_state)data, op, false);
757   return false;
758 }
759 
760 /* Wrapper around check_decl for stores in ipa mode.  */
761 
762 static bool
763 check_ipa_store (gimple *, tree op, tree, void *data)
764 {
765   if (DECL_P (op))
766     check_decl ((funct_state)data, op, true, true);
767   else
768     check_op ((funct_state)data, op, true);
769   return false;
770 }
771 
/* Look at the statement pointed to by GSIP and record in LOCAL any
   interesting side effects it has.  */
774 static void
775 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
776 {
777   gimple *stmt = gsi_stmt (*gsip);
778 
779   if (is_gimple_debug (stmt))
780     return;
781 
  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep their clobber semantics than
     eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */
788 
789   if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
790     return;
791 
792   if (dump_file)
793     {
794       fprintf (dump_file, "  scanning: ");
795       print_gimple_stmt (dump_file, stmt, 0);
796     }
797 
798   if (gimple_has_volatile_ops (stmt)
799       && !gimple_clobber_p (stmt))
800     {
801       local->pure_const_state = IPA_NEITHER;
802       if (dump_file)
803 	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
804     }
805 
806   /* Look for loads and stores.  */
807   walk_stmt_load_store_ops (stmt, local,
808 			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);
810 
811   if (gimple_code (stmt) != GIMPLE_CALL
812       && stmt_could_throw_p (stmt))
813     {
814       if (cfun->can_throw_non_call_exceptions)
815 	{
816 	  if (dump_file)
817 	    fprintf (dump_file, "    can throw; looping\n");
818 	  local->looping = true;
819 	}
820       if (stmt_can_throw_external (stmt))
821 	{
822 	  if (dump_file)
823 	    fprintf (dump_file, "    can throw externally\n");
824 	  local->can_throw = true;
825 	}
826       else
827 	if (dump_file)
828 	  fprintf (dump_file, "    can throw\n");
829     }
830   switch (gimple_code (stmt))
831     {
832     case GIMPLE_CALL:
833       check_call (local, as_a <gcall *> (stmt), ipa);
834       break;
835     case GIMPLE_LABEL:
836       if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
837 	/* Target of long jump. */
838 	{
839           if (dump_file)
840             fprintf (dump_file, "    nonlocal label is not const/pure\n");
841 	  local->pure_const_state = IPA_NEITHER;
842 	}
843       break;
844     case GIMPLE_ASM:
845       if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
846 	{
847 	  if (dump_file)
848 	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
849 	  /* Abandon all hope, ye who enter here. */
850 	  local->pure_const_state = IPA_NEITHER;
851 	  local->can_free = true;
852 	}
853       if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
854 	{
855 	  if (dump_file)
856 	    fprintf (dump_file, "    volatile is not const/pure\n");
857 	  /* Abandon all hope, ye who enter here. */
858 	  local->pure_const_state = IPA_NEITHER;
859 	  local->looping = true;
860 	  local->can_free = true;
861 	}
862       return;
863     default:
864       break;
865     }
866 }
867 
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */
870 
871 static bool
872 check_retval_uses (tree retval, gimple *stmt)
873 {
874   imm_use_iterator use_iter;
875   gimple *use_stmt;
876 
877   FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
878     if (gcond *cond = dyn_cast<gcond *> (use_stmt))
879       {
880 	tree op2 = gimple_cond_rhs (cond);
881 	if (!integer_zerop (op2))
882 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
883       }
884     else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
885       {
886 	enum tree_code code = gimple_assign_rhs_code (ga);
887 	if (TREE_CODE_CLASS (code) != tcc_comparison)
888 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
889 	if (!integer_zerop (gimple_assign_rhs2 (ga)))
890 	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
891       }
892     else if (is_gimple_debug (use_stmt))
893       ;
894     else if (use_stmt != stmt)
895       RETURN_FROM_IMM_USE_STMT (use_iter, false);
896 
897   return true;
898 }
899 
/* malloc_candidate_p () checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and every element of the phi is either NULL or
      SSA_NAME_DEF_STMT (element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond
      or gassign) and the return statement (and likewise a phi arg has
      immediate uses only within a comparison or the phi statement).  */
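
/* For example, a simple wrapper like the following (illustrative user
   code, not part of GCC) is a candidate:

     void *my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   The return value is a pointer SSA_NAME defined by a call, and its only
   uses are a comparison against 0 and the return statement.  */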
910 
911 static bool
912 malloc_candidate_p (function *fun, bool ipa)
913 {
914   basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
915   edge e;
916   edge_iterator ei;
917   cgraph_node *node = cgraph_node::get_create (fun->decl);
918 
919 #define DUMP_AND_RETURN(reason)  \
920 {  \
921   if (dump_file && (dump_flags & TDF_DETAILS))  \
922     fprintf (dump_file, "%s", (reason));  \
923   return false;  \
924 }
925 
926   if (EDGE_COUNT (exit_block->preds) == 0)
927     return false;
928 
929   FOR_EACH_EDGE (e, ei, exit_block->preds)
930     {
931       gimple_stmt_iterator gsi = gsi_last_bb (e->src);
932       greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
933 
934       if (!ret_stmt)
935 	return false;
936 
937       tree retval = gimple_return_retval (ret_stmt);
938       if (!retval)
939 	DUMP_AND_RETURN("No return value.")
940 
941       if (TREE_CODE (retval) != SSA_NAME
942 	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
943 	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
944 
945       if (!check_retval_uses (retval, ret_stmt))
946 	DUMP_AND_RETURN("Return value has uses outside return stmt"
947 			" and comparisons against 0.")
948 
949       gimple *def = SSA_NAME_DEF_STMT (retval);
950       if (gcall *call_stmt = dyn_cast<gcall *> (def))
951 	{
952 	  tree callee_decl = gimple_call_fndecl (call_stmt);
953 	  if (!callee_decl)
954 	    return false;
955 
956 	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
957 	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
958 			    " non-ipa mode.")
959 
960 	  cgraph_edge *cs = node->get_edge (call_stmt);
961 	  if (cs)
962 	    {
963 	      ipa_call_summary *es = ipa_call_summaries->get (cs);
964 	      gcc_assert (es);
965 	      es->is_return_callee_uncaptured = true;
966 	    }
967 	}
968 
969       else if (gphi *phi = dyn_cast<gphi *> (def))
970 	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
971 	  {
972 	    tree arg = gimple_phi_arg_def (phi, i);
973 	    if (TREE_CODE (arg) != SSA_NAME)
974 	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
975 	    if (!(arg == null_pointer_node || check_retval_uses (arg, phi)))
976 	      DUMP_AND_RETURN("phi arg has uses outside phi"
977 			      " and comparisons against 0.")
978 
979 	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
980 	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
981 	    if (!call_stmt)
982 	      return false;
983 	    tree callee_decl = gimple_call_fndecl (call_stmt);
984 	    if (!callee_decl)
985 	      return false;
986 	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
987 	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
988 			      " non-ipa mode.")
989 
990 	    cgraph_edge *cs = node->get_edge (call_stmt);
991 	    if (cs)
992 	      {
993 		ipa_call_summary *es = ipa_call_summaries->get (cs);
994 		gcc_assert (es);
995 		es->is_return_callee_uncaptured = true;
996 	      }
997 	  }
998 
999       else
1000 	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
1001     }
1002 
1003   if (dump_file && (dump_flags & TDF_DETAILS))
1004     fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1005 	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1006   return true;
1007 
1008 #undef DUMP_AND_RETURN
1009 }
1010 
1011 
1012 /* This is the main routine for finding the reference patterns for
1013    global variables within a function FN.  */
1014 
1015 static funct_state
1016 analyze_function (struct cgraph_node *fn, bool ipa)
1017 {
1018   tree decl = fn->decl;
1019   funct_state l;
1020   basic_block this_block;
1021 
1022   l = XCNEW (struct funct_state_d);
1023   l->pure_const_state = IPA_CONST;
1024   l->state_previously_known = IPA_NEITHER;
1025   l->looping_previously_known = true;
1026   l->looping = false;
1027   l->can_throw = false;
1028   l->can_free = false;
1029   state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1030 		    flags_from_decl_or_type (fn->decl),
1031 		    fn->cannot_return_p ());
1032 
1033   if (fn->thunk.thunk_p || fn->alias)
1034     {
1035       /* Thunk gets propagated through, so nothing interesting happens.  */
1036       gcc_assert (ipa);
1037       if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1038 	l->pure_const_state = IPA_NEITHER;
1039       return l;
1040     }
1041 
1042   if (dump_file)
1043     {
1044       fprintf (dump_file, "\n\n local analysis of %s\n ",
1045 	       fn->name ());
1046     }
1047 
1048   push_cfun (DECL_STRUCT_FUNCTION (decl));
1049 
1050   FOR_EACH_BB_FN (this_block, cfun)
1051     {
1052       gimple_stmt_iterator gsi;
1053       struct walk_stmt_info wi;
1054 
1055       memset (&wi, 0, sizeof (wi));
1056       for (gsi = gsi_start_bb (this_block);
1057 	   !gsi_end_p (gsi);
1058 	   gsi_next (&gsi))
1059 	{
1060 	  check_stmt (&gsi, l, ipa);
1061 	  if (l->pure_const_state == IPA_NEITHER
1062 	      && l->looping
1063 	      && l->can_throw
1064 	      && l->can_free)
1065 	    goto end;
1066 	}
1067     }
1068 
1069 end:
1070   if (l->pure_const_state != IPA_NEITHER)
1071     {
      /* Const functions cannot have back edges (an indication of a
	 possible infinite-loop side effect).  */
1075       if (mark_dfs_back_edges ())
1076         {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve the chances that a
	     loop will be proved finite in testcases such as loop-15.c
	     and loop-24.c.  */
1081 	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1082 			       | LOOPS_HAVE_SIMPLE_LATCHES
1083 			       | LOOPS_HAVE_RECORDED_EXITS);
1084 	  if (dump_file && (dump_flags & TDF_DETAILS))
1085 	    flow_loops_dump (dump_file, NULL, 0);
1086 	  if (mark_irreducible_loops ())
1087 	    {
1088 	      if (dump_file)
1089 	        fprintf (dump_file, "    has irreducible loops\n");
1090 	      l->looping = true;
1091 	    }
1092 	  else
1093 	    {
1094 	      struct loop *loop;
1095 	      scev_initialize ();
1096 	      FOR_EACH_LOOP (loop, 0)
1097 		if (!finite_loop_p (loop))
1098 		  {
1099 		    if (dump_file)
		      fprintf (dump_file, "    cannot prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
1103 		    break;
1104 		  }
1105 	      scev_finalize ();
1106 	    }
1107           loop_optimizer_finalize ();
1108 	}
1109     }
1110 
1111   if (dump_file && (dump_flags & TDF_DETAILS))
1112     fprintf (dump_file, "    checking previously known:");
1113 
1114   better_state (&l->pure_const_state, &l->looping,
1115 		l->state_previously_known,
1116 		l->looping_previously_known);
1117   if (TREE_NOTHROW (decl))
1118     l->can_throw = false;
1119 
1120   l->malloc_state = STATE_MALLOC_BOTTOM;
1121   if (DECL_IS_MALLOC (decl))
1122     l->malloc_state = STATE_MALLOC;
1123   else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1124     l->malloc_state = STATE_MALLOC_TOP;
1125   else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1126     l->malloc_state = STATE_MALLOC;
1127 
1128   pop_cfun ();
1129   if (dump_file)
1130     {
1131       if (l->looping)
1132         fprintf (dump_file, "Function is locally looping.\n");
1133       if (l->can_throw)
1134         fprintf (dump_file, "Function is locally throwing.\n");
1135       if (l->pure_const_state == IPA_CONST)
1136         fprintf (dump_file, "Function is locally const.\n");
1137       if (l->pure_const_state == IPA_PURE)
1138         fprintf (dump_file, "Function is locally pure.\n");
1139       if (l->can_free)
1140 	fprintf (dump_file, "Function can locally free.\n");
1141       if (l->malloc_state == STATE_MALLOC)
1142 	fprintf (dump_file, "Function is locally malloc.\n");
1143     }
1144   return l;
1145 }
1146 
/* Called when a new function is inserted into the callgraph late.  */
1148 static void
1149 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1150 {
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
1155   if (opt_for_fn (node->decl, flag_ipa_pure_const))
1156     set_function_state (node, analyze_function (node, true));
1157 }
1158 
/* Called when a new clone is inserted into the callgraph late.  */
1160 
1161 static void
1162 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
1163 	 	     void *data ATTRIBUTE_UNUSED)
1164 {
1165   if (has_function_state (src))
1166     {
1167       funct_state l = XNEW (struct funct_state_d);
1168       gcc_assert (!has_function_state (dst));
1169       memcpy (l, get_function_state (src), sizeof (*l));
1170       set_function_state (dst, l);
1171     }
1172 }
1173 
/* Called when a cgraph node is removed from the callgraph late.  */
1175 
1176 static void
1177 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
1178 {
1179   if (has_function_state (node))
1180     set_function_state (node, NULL);
1181 }
1182 
1183 
1184 void
1185 pass_ipa_pure_const::
1186 register_hooks (void)
1187 {
1188   if (init_p)
1189     return;
1190 
1191   init_p = true;
1192 
1193   node_removal_hook_holder =
1194       symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
1195   node_duplication_hook_holder =
1196       symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
1197   function_insertion_hook_holder =
1198       symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
1199 }
1200 
1201 
1202 /* Analyze each function in the cgraph to see if it is locally PURE or
1203    CONST.  */
1204 
1205 static void
1206 pure_const_generate_summary (void)
1207 {
1208   struct cgraph_node *node;
1209 
1210   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1211   pass->register_hooks ();
1212 
1213   /* Process all of the functions.
1214 
     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function gets cloned and the clone is AVAILABLE.  */
1218 
1219   FOR_EACH_DEFINED_FUNCTION (node)
1220     if (opt_for_fn (node->decl, flag_ipa_pure_const))
1221       set_function_state (node, analyze_function (node, true));
1222 }
1223 
1224 
1225 /* Serialize the ipa info for lto.  */
1226 
1227 static void
1228 pure_const_write_summary (void)
1229 {
1230   struct cgraph_node *node;
1231   struct lto_simple_output_block *ob
1232     = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1233   unsigned int count = 0;
1234   lto_symtab_encoder_iterator lsei;
1235   lto_symtab_encoder_t encoder;
1236 
1237   encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1238 
1239   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1240        lsei_next_function_in_partition (&lsei))
1241     {
1242       node = lsei_cgraph_node (lsei);
1243       if (node->definition && has_function_state (node))
1244 	count++;
1245     }
1246 
1247   streamer_write_uhwi_stream (ob->main_stream, count);
1248 
1249   /* Process all of the functions.  */
1250   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1251        lsei_next_function_in_partition (&lsei))
1252     {
1253       node = lsei_cgraph_node (lsei);
1254       if (node->definition && has_function_state (node))
1255 	{
1256 	  struct bitpack_d bp;
1257 	  funct_state fs;
1258 	  int node_ref;
1259 	  lto_symtab_encoder_t encoder;
1260 
1261 	  fs = get_function_state (node);
1262 
1263 	  encoder = ob->decl_state->symtab_node_encoder;
1264 	  node_ref = lto_symtab_encoder_encode (encoder, node);
1265 	  streamer_write_uhwi_stream (ob->main_stream, node_ref);
1266 
	  /* Note that the flags must be read back in the same order in
	     which they are packed below.  */
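	  /* 2 + 2 + 1 + 1 + 1 + 1 + 2 == 10 bits are streamed per
	     function.  */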
1269 	  bp = bitpack_create (ob->main_stream);
1270 	  bp_pack_value (&bp, fs->pure_const_state, 2);
1271 	  bp_pack_value (&bp, fs->state_previously_known, 2);
1272 	  bp_pack_value (&bp, fs->looping_previously_known, 1);
1273 	  bp_pack_value (&bp, fs->looping, 1);
1274 	  bp_pack_value (&bp, fs->can_throw, 1);
1275 	  bp_pack_value (&bp, fs->can_free, 1);
1276 	  bp_pack_value (&bp, fs->malloc_state, 2);
1277 	  streamer_write_bitpack (&bp);
1278 	}
1279     }
1280 
1281   lto_destroy_simple_output_block (ob);
1282 }
1283 
1284 
1285 /* Deserialize the ipa info for lto.  */
1286 
1287 static void
1288 pure_const_read_summary (void)
1289 {
1290   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1291   struct lto_file_decl_data *file_data;
1292   unsigned int j = 0;
1293 
1294   pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1295   pass->register_hooks ();
1296 
1297   while ((file_data = file_data_vec[j++]))
1298     {
1299       const char *data;
1300       size_t len;
1301       struct lto_input_block *ib
1302 	= lto_create_simple_input_block (file_data,
1303 					 LTO_section_ipa_pure_const,
1304 					 &data, &len);
1305       if (ib)
1306 	{
1307 	  unsigned int i;
1308 	  unsigned int count = streamer_read_uhwi (ib);
1309 
1310 	  for (i = 0; i < count; i++)
1311 	    {
1312 	      unsigned int index;
1313 	      struct cgraph_node *node;
1314 	      struct bitpack_d bp;
1315 	      funct_state fs;
1316 	      lto_symtab_encoder_t encoder;
1317 
1318 	      fs = XCNEW (struct funct_state_d);
1319 	      index = streamer_read_uhwi (ib);
1320 	      encoder = file_data->symtab_node_encoder;
1321 	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1322 									index));
1323 	      set_function_state (node, fs);
1324 
	      /* Note that the flags must be read in the same order in
		 which they were written by pure_const_write_summary.  */
1328 	      bp = streamer_read_bitpack (ib);
1329 	      fs->pure_const_state
1330 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1331 	      fs->state_previously_known
1332 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1333 	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
1334 	      fs->looping = bp_unpack_value (&bp, 1);
1335 	      fs->can_throw = bp_unpack_value (&bp, 1);
1336 	      fs->can_free = bp_unpack_value (&bp, 1);
1337 	      fs->malloc_state
1338 			= (enum malloc_state_e) bp_unpack_value (&bp, 2);
1339 
1340 	      if (dump_file)
1341 		{
1342 		  int flags = flags_from_decl_or_type (node->decl);
1343 		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
1344 		  if (flags & ECF_CONST)
1345 		    fprintf (dump_file, " const");
1346 		  if (flags & ECF_PURE)
1347 		    fprintf (dump_file, " pure");
1348 		  if (flags & ECF_NOTHROW)
1349 		    fprintf (dump_file, " nothrow");
1350 		  fprintf (dump_file, "\n  pure const state: %s\n",
1351 			   pure_const_names[fs->pure_const_state]);
1352 		  fprintf (dump_file, "  previously known state: %s\n",
1353 			   pure_const_names[fs->state_previously_known]);
1354 		  if (fs->looping)
1355 		    fprintf (dump_file,"  function is locally looping\n");
1356 		  if (fs->looping_previously_known)
1357 		    fprintf (dump_file,"  function is previously known looping\n");
1358 		  if (fs->can_throw)
1359 		    fprintf (dump_file,"  function is locally throwing\n");
1360 		  if (fs->can_free)
1361 		    fprintf (dump_file,"  function can locally free\n");
1362 		  fprintf (dump_file, "\n malloc state: %s\n",
1363 			   malloc_state_names[fs->malloc_state]);
1364 		}
1365 	    }
1366 
1367 	  lto_destroy_simple_input_block (file_data,
1368 					  LTO_section_ipa_pure_const,
1369 					  ib, data, len);
1370 	}
1371     }
1372 }
1373 
/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */
1376 
1377 static bool
1378 ignore_edge_for_nothrow (struct cgraph_edge *e)
1379 {
1380   if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1381     return true;
1382 
1383   enum availability avail;
1384   cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1385 							        e->caller);
1386   if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1387     return true;
1388   return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1389 	 && !e->callee->binds_to_current_def_p (e->caller);
1390 }
1391 
/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self-recursion
   only.  */
1396 
1397 static bool
1398 self_recursive_p (struct cgraph_node *node)
1399 {
1400   struct cgraph_edge *e;
1401   for (e = node->callees; e; e = e->next_callee)
1402     if (e->callee->function_symbol () == node)
1403       return true;
1404   return false;
1405 }
1406 
/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the function as unreachable once it is marked
   const/pure.  */
1409 
1410 static bool
1411 cdtor_p (cgraph_node *n, void *)
1412 {
1413   if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1414     return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1415 	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1416   return false;
1417 }
1418 
/* We only propagate across edges with a non-interposable callee.  */
1420 
1421 static bool
1422 ignore_edge_for_pure_const (struct cgraph_edge *e)
1423 {
1424   enum availability avail;
1425   e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1426   return (avail <= AVAIL_INTERPOSABLE);
1427 }
1428 
1429 
1430 /* Produce transitive closure over the callgraph and compute pure/const
1431    attributes.  */
1432 
1433 static bool
1434 propagate_pure_const (void)
1435 {
1436   struct cgraph_node *node;
1437   struct cgraph_node *w;
1438   struct cgraph_node **order =
1439     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1440   int order_pos;
1441   int i;
1442   struct ipa_dfs_info * w_info;
1443   bool remove_p = false;
1444   bool has_cdtor;
1445 
1446   order_pos = ipa_reduced_postorder (order, true,
1447 				     ignore_edge_for_pure_const);
1448   if (dump_file)
1449     {
1450       cgraph_node::dump_cgraph (dump_file);
1451       ipa_print_order (dump_file, "reduced", order, order_pos);
1452     }
1453 
1454   /* Propagate the local information through the call graph to produce
1455      the global information.  All the nodes within a cycle will have
1456      the same info so we collapse cycles first.  Then we can do the
1457      propagation in one pass from the leaves to the roots.  */
1458   for (i = 0; i < order_pos; i++ )
1459     {
1460       enum pure_const_state_e pure_const_state = IPA_CONST;
1461       bool looping = false;
1462       int count = 0;
1463       node = order[i];
1464 
1465       if (node->alias)
1466 	continue;
1467 
1468       if (dump_file && (dump_flags & TDF_DETAILS))
1469 	fprintf (dump_file, "Starting cycle\n");
1470 
1471       /* Find the worst state for any node in the cycle.  */
1472       w = node;
1473       while (w && pure_const_state != IPA_NEITHER)
1474 	{
1475 	  struct cgraph_edge *e;
1476 	  struct cgraph_edge *ie;
1477 	  int i;
1478 	  struct ipa_ref *ref = NULL;
1479 
1480 	  funct_state w_l = get_function_state (w);
1481 	  if (dump_file && (dump_flags & TDF_DETAILS))
1482 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
1483 		     w->dump_name (),
1484 		     pure_const_names[w_l->pure_const_state],
1485 		     w_l->looping);
1486 
	  /* First merge in function body properties.
	     It is safe to pass NULL as FROM and TO because we will take
	     care of possible interposition when walking callees.  */
1490 	  worse_state (&pure_const_state, &looping,
1491 		       w_l->pure_const_state, w_l->looping,
1492 		       NULL, NULL);
1493 	  if (pure_const_state == IPA_NEITHER)
1494 	    break;
1495 
1496 	  count++;
1497 
1498 	  /* We consider recursive cycles as possibly infinite.
1499 	     This might be relaxed since infinite recursion leads to stack
1500 	     overflow.  */
1501 	  if (count > 1)
1502 	    looping = true;
1503 
1504 	  /* Now walk the edges and merge in callee properties.  */
1505 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1506 	       e = e->next_callee)
1507 	    {
1508 	      enum availability avail;
1509 	      struct cgraph_node *y = e->callee->
1510 				function_or_virtual_thunk_symbol (&avail,
1511 								  e->caller);
1512 	      enum pure_const_state_e edge_state = IPA_CONST;
1513 	      bool edge_looping = false;
1514 
1515 	      if (dump_file && (dump_flags & TDF_DETAILS))
1516 		{
1517 		  fprintf (dump_file, "    Call to %s",
1518 			   e->callee->dump_name ());
1519 		}
1520 	      if (avail > AVAIL_INTERPOSABLE)
1521 		{
1522 		  funct_state y_l = get_function_state (y);
1523 		  if (dump_file && (dump_flags & TDF_DETAILS))
1524 		    {
1525 		      fprintf (dump_file,
1526 			       " state:%s looping:%i\n",
1527 			       pure_const_names[y_l->pure_const_state],
1528 			       y_l->looping);
1529 		    }
1530 		  if (y_l->pure_const_state > IPA_PURE
1531 		      && e->cannot_lead_to_return_p ())
1532 		    {
1533 		      if (dump_file && (dump_flags & TDF_DETAILS))
1534 			fprintf (dump_file,
1535 				 "        Ignoring side effects"
1536 				 " -> pure, looping\n");
1537 		      edge_state = IPA_PURE;
1538 		      edge_looping = true;
1539 		    }
1540 		  else
1541 		    {
1542 		      edge_state = y_l->pure_const_state;
1543 		      edge_looping = y_l->looping;
1544 		    }
1545 		}
1546 	      else if (special_builtin_state (&edge_state, &edge_looping,
1547 					       y->decl))
1548 		;
1549 	      else
1550 		state_from_flags (&edge_state, &edge_looping,
1551 				  flags_from_decl_or_type (y->decl),
1552 				  e->cannot_lead_to_return_p ());
1553 
1554 	      /* Merge the results with what we already know.  */
1555 	      better_state (&edge_state, &edge_looping,
1556 			    w_l->state_previously_known,
1557 			    w_l->looping_previously_known);
1558 	      worse_state (&pure_const_state, &looping,
1559 			   edge_state, edge_looping, e->caller, e->callee);
1560 	      if (pure_const_state == IPA_NEITHER)
1561 	        break;
1562 	    }
1563 
	  /* Now process the indirect calls.  */
1565           for (ie = w->indirect_calls;
1566 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1567 	    {
1568 	      enum pure_const_state_e edge_state = IPA_CONST;
1569 	      bool edge_looping = false;
1570 
1571 	      if (dump_file && (dump_flags & TDF_DETAILS))
1572 		fprintf (dump_file, "    Indirect call");
1573 	      state_from_flags (&edge_state, &edge_looping,
1574 			        ie->indirect_info->ecf_flags,
1575 				ie->cannot_lead_to_return_p ());
1576 	      /* Merge the results with what we already know.  */
1577 	      better_state (&edge_state, &edge_looping,
1578 			    w_l->state_previously_known,
1579 			    w_l->looping_previously_known);
1580 	      worse_state (&pure_const_state, &looping,
1581 			   edge_state, edge_looping, NULL, NULL);
1582 	      if (pure_const_state == IPA_NEITHER)
1583 	        break;
1584 	    }
1585 
1586 	  /* And finally all loads and stores.  */
1587 	  for (i = 0; w->iterate_reference (i, ref)
1588 	       && pure_const_state != IPA_NEITHER; i++)
1589 	    {
1590 	      enum pure_const_state_e ref_state = IPA_CONST;
1591 	      bool ref_looping = false;
1592 	      switch (ref->use)
1593 		{
1594 		case IPA_REF_LOAD:
		  /* Readonly reads are safe.  */
1596 		  if (TREE_READONLY (ref->referred->decl))
1597 		    break;
1598 		  if (dump_file && (dump_flags & TDF_DETAILS))
1599 		    fprintf (dump_file, "    nonreadonly global var read\n");
1600 		  ref_state = IPA_PURE;
1601 		  break;
1602 		case IPA_REF_STORE:
1603 		  if (ref->cannot_lead_to_return ())
1604 		    break;
1605 		  ref_state = IPA_NEITHER;
1606 		  if (dump_file && (dump_flags & TDF_DETAILS))
1607 		    fprintf (dump_file, "    global var write\n");
1608 		  break;
1609 		case IPA_REF_ADDR:
1610 		case IPA_REF_CHKP:
1611 		  break;
1612 		default:
1613 		  gcc_unreachable ();
1614 		}
1615 	      better_state (&ref_state, &ref_looping,
1616 			    w_l->state_previously_known,
1617 			    w_l->looping_previously_known);
1618 	      worse_state (&pure_const_state, &looping,
1619 			   ref_state, ref_looping, NULL, NULL);
1620 	      if (pure_const_state == IPA_NEITHER)
1621 		break;
1622 	    }
1623 	  w_info = (struct ipa_dfs_info *) w->aux;
1624 	  w = w_info->next_cycle;
1625 	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names[pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

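	  /* Note how STATE_PREVIOUSLY_KNOWN caps the computed state
	     below: e.g. a function the user declared with
	     __attribute__ ((pure)) stays at least pure even if the
	     propagation above concluded IPA_NEITHER, trusting the
	     user's annotation.  */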
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning a constructor or destructor into a non-looping
		   const/pure function enables us to possibly remove it
		   completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */
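
/* For example, given (hypothetically, in a C++ translation unit)

     static int add (int a, int b) { return a + b; }
     int twice (int x) { return add (x, x); }

   once ADD is known not to throw, the propagation below can mark TWICE
   nothrow as well, assuming neither function can be interposed.  */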

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
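  /* E.g., two (hypothetical) mutually recursive functions

       void f (int n) { if (n) g (n - 1); }
       void g (int n) { if (n) f (n - 1); }

     form a single cycle and therefore end up with identical nothrow
     information.  */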
  for (i = 0; i < order_pos; i++)
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.  When the callee is compiled with
		     non-call exceptions, we must also check that its
		     declaration is bound to the current body, since another
		     semantically equivalent body may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}

/* Propagate malloc attribute across the callgraph.  */
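
/* For instance, a (hypothetical) thin wrapper

     void *my_alloc (size_t n) { return malloc (n); }

   returns the uncaptured result of a malloc-like callee, so once malloc
   itself is seeded with STATE_MALLOC the fixpoint loop below can move
   MY_ALLOC to STATE_MALLOC as well and set the malloc attribute on it.  */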

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl)
	  && !has_function_state (node))
	{
	  funct_state l = XCNEW (struct funct_state_d);
	  *l = varying_state;
	  l->malloc_state = STATE_MALLOC;
	  set_function_state (node, l);
	}
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !has_function_state (node))
	    continue;

	  funct_state l = get_function_state (node);

	  /* FIXME: add support for indirect calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!has_function_state (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state
		= get_function_state (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  /* Done with the scratch list of callees.  */
	  callees.release ();
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
	funct_state l = get_function_state (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
  /* Doing nothrow propagation first lets more calls be recognized as
     not leading to a return, which improves the analyses that follow.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
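
/* As a (hypothetical) example of what this pass catches early:

     static int sq (int x) { return x * x; }

   can already be marked const here, which may allow the remaining
   early passes to CSE repeated calls to it.  */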

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
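  /* E.g., a (hypothetical) function in which every path ends in a call
     to abort ():

       void fatal (const char *msg) { my_log (msg); abort (); }

     leaves the exit block without predecessor edges, so it is detected
     and marked noreturn here (MY_LOG is a made-up helper).  */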
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
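
/* For example (hypothetically, in C++), a self-recursive function

     int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   whose only potentially-throwing statements are its recursive calls
   can still be marked nothrow below: if the outer call cannot throw,
   neither can the nested ones.  */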

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
	    /* A recursive call cannot make the function throw externally
	       unless the function itself can; skip such calls.  */
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}