1 /* Callgraph based analysis of static variables.
2    Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* This file marks functions as being either const (TREE_READONLY) or
23    pure (DECL_PURE_P).  It can also set a variant of these that
24    are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 
26    This must be run after inlining decisions have been made since
27    otherwise, the local sets will not contain information that is
28    consistent with post inlined state.  The global sets are not prone
29    to this problem since they are by definition transitive.  */
30 
31 /* The code in this module is called by the ipa pass manager. It
32    should be one of the later passes since it's information is used by
33    the rest of the compilation. */
34 
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "gimple.h"
48 #include "cgraph.h"
49 #include "output.h"
50 #include "flags.h"
51 #include "timevar.h"
52 #include "diagnostic.h"
53 #include "gimple-pretty-print.h"
54 #include "langhooks.h"
55 #include "target.h"
56 #include "lto-streamer.h"
57 #include "data-streamer.h"
58 #include "tree-streamer.h"
59 #include "cfgloop.h"
60 #include "tree-scalar-evolution.h"
61 #include "intl.h"
62 #include "opts.h"
63 
64 static struct pointer_set_t *visited_nodes;
65 
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  Order matters: smaller values are "better", so the
   lattice operations below use <, MIN and MAX on these values.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human-readable names for the lattice values above, indexed by
   enum pure_const_state_e; used when dumping.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
77 
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Whether the previously known (declared) state permits looping.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception visible to callers.  */
  bool can_throw;
};

/* State used when we know nothing about function.
   Initializer order matches the field order above:
   pure_const_state, state_previously_known, looping_previously_known,
   looping, can_throw.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true };


/* Functions are keyed by pointer to this per-function record.  */
typedef struct funct_state_d * funct_state;
105 
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  Entries may
   be NULL for nodes that were never analyzed; see get_function_state.  */

DEF_VEC_P (funct_state);
DEF_VEC_ALLOC_P (funct_state, heap);
static VEC (funct_state, heap) *funct_state_vec;

/* Holders of ipa cgraph hooks, so they can be unregistered later: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
120 
121 /* Try to guess if function body will always be visible to compiler
122    when compiling the call and whether compiler will be able
123    to propagate the information by itself.  */
124 
125 static bool
126 function_always_visible_to_compiler_p (tree decl)
127 {
128   return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
129 }
130 
131 /* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
132    is true if the function is known to be finite.  The diagnostic is
133    controlled by OPTION.  WARNED_ABOUT is a pointer_set unique for
134    OPTION, this function may initialize it and it is always returned
135    by the function.  */
136 
137 static struct pointer_set_t *
138 suggest_attribute (int option, tree decl, bool known_finite,
139 		   struct pointer_set_t *warned_about,
140 		   const char * attrib_name)
141 {
142   if (!option_enabled (option, &global_options))
143     return warned_about;
144   if (TREE_THIS_VOLATILE (decl)
145       || (known_finite && function_always_visible_to_compiler_p (decl)))
146     return warned_about;
147 
148   if (!warned_about)
149     warned_about = pointer_set_create ();
150   if (pointer_set_contains (warned_about, decl))
151     return warned_about;
152   pointer_set_insert (warned_about, decl);
153   warning_at (DECL_SOURCE_LOCATION (decl),
154 	      option,
155 	      known_finite
156 	      ? _("function might be candidate for attribute %<%s%>")
157 	      : _("function might be candidate for attribute %<%s%>"
158 		  " if it is known to return normally"), attrib_name);
159   return warned_about;
160 }
161 
162 /* Emit suggestion about __attribute_((pure)) for DECL.  KNOWN_FINITE
163    is true if the function is known to be finite.  */
164 
165 static void
166 warn_function_pure (tree decl, bool known_finite)
167 {
168   static struct pointer_set_t *warned_about;
169 
170   warned_about
171     = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
172 			 known_finite, warned_about, "pure");
173 }
174 
175 /* Emit suggestion about __attribute_((const)) for DECL.  KNOWN_FINITE
176    is true if the function is known to be finite.  */
177 
178 static void
179 warn_function_const (tree decl, bool known_finite)
180 {
181   static struct pointer_set_t *warned_about;
182   warned_about
183     = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
184 			 known_finite, warned_about, "const");
185 }
186 
187 void
188 warn_function_noreturn (tree decl)
189 {
190   static struct pointer_set_t *warned_about;
191   if (!lang_hooks.missing_noreturn_ok_p (decl))
192     warned_about
193       = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
194 			   true, warned_about, "noreturn");
195 }
/* Release the per-function state array.  (The previous comment said
   "Init the function state", which did not match the code.)
   NOTE(review): funct_state_vec is managed through the VEC API;
   releasing it with plain free rather than VEC_free looks suspicious --
   confirm against the VEC heap allocation strategy.  */

static void
finish_state (void)
{
  free (funct_state_vec);
}
203 
204 
205 /* Return true if we have a function state for NODE.  */
206 
207 static inline bool
208 has_function_state (struct cgraph_node *node)
209 {
210   if (!funct_state_vec
211       || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
212     return false;
213   return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
214 }
215 
216 /* Return the function state from NODE.  */
217 
218 static inline funct_state
219 get_function_state (struct cgraph_node *node)
220 {
221   if (!funct_state_vec
222       || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid
223       || !VEC_index (funct_state, funct_state_vec, node->uid))
224     /* We might want to put correct previously_known state into varying.  */
225     return &varying_state;
226  return VEC_index (funct_state, funct_state_vec, node->uid);
227 }
228 
229 /* Set the function state S for NODE.  */
230 
231 static inline void
232 set_function_state (struct cgraph_node *node, funct_state s)
233 {
234   if (!funct_state_vec
235       || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
236      VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
237   VEC_replace (funct_state, funct_state_vec, node->uid, s);
238 }
239 
240 /* Check to see if the use (or definition when CHECKING_WRITE is true)
241    variable T is legal in a function that is either pure or const.  */
242 
243 static inline void
244 check_decl (funct_state local,
245 	    tree t, bool checking_write, bool ipa)
246 {
247   /* Do not want to do anything with volatile except mark any
248      function that uses one to be not const or pure.  */
249   if (TREE_THIS_VOLATILE (t))
250     {
251       local->pure_const_state = IPA_NEITHER;
252       if (dump_file)
253         fprintf (dump_file, "    Volatile operand is not const/pure");
254       return;
255     }
256 
257   /* Do not care about a local automatic that is not static.  */
258   if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
259     return;
260 
261   /* If the variable has the "used" attribute, treat it as if it had a
262      been touched by the devil.  */
263   if (DECL_PRESERVE_P (t))
264     {
265       local->pure_const_state = IPA_NEITHER;
266       if (dump_file)
267         fprintf (dump_file, "    Used static/global variable is not const/pure\n");
268       return;
269     }
270 
271   /* In IPA mode we are not interested in checking actual loads and stores;
272      they will be processed at propagation time using ipa_ref.  */
273   if (ipa)
274     return;
275 
276   /* Since we have dealt with the locals and params cases above, if we
277      are CHECKING_WRITE, this cannot be a pure or constant
278      function.  */
279   if (checking_write)
280     {
281       local->pure_const_state = IPA_NEITHER;
282       if (dump_file)
283         fprintf (dump_file, "    static/global memory write is not const/pure\n");
284       return;
285     }
286 
287   if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
288     {
289       /* Readonly reads are safe.  */
290       if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
291 	return; /* Read of a constant, do not change the function state.  */
292       else
293 	{
294           if (dump_file)
295             fprintf (dump_file, "    global memory read is not const\n");
296 	  /* Just a regular read.  */
297 	  if (local->pure_const_state == IPA_CONST)
298 	    local->pure_const_state = IPA_PURE;
299 	}
300     }
301   else
302     {
303       /* Compilation level statics can be read if they are readonly
304 	 variables.  */
305       if (TREE_READONLY (t))
306 	return;
307 
308       if (dump_file)
309 	fprintf (dump_file, "    static memory read is not const\n");
310       /* Just a regular read.  */
311       if (local->pure_const_state == IPA_CONST)
312 	local->pure_const_state = IPA_PURE;
313     }
314 }
315 
316 
317 /* Check to see if the use (or definition when CHECKING_WRITE is true)
318    variable T is legal in a function that is either pure or const.  */
319 
320 static inline void
321 check_op (funct_state local, tree t, bool checking_write)
322 {
323   t = get_base_address (t);
324   if (t && TREE_THIS_VOLATILE (t))
325     {
326       local->pure_const_state = IPA_NEITHER;
327       if (dump_file)
328 	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
329       return;
330     }
331   else if (t
332   	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
333 	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
334 	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
335     {
336       if (dump_file)
337 	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
338       return;
339     }
340   else if (checking_write)
341     {
342       local->pure_const_state = IPA_NEITHER;
343       if (dump_file)
344 	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
345       return;
346     }
347   else
348     {
349       if (dump_file)
350 	fprintf (dump_file, "    Indirect ref read is not const\n");
351       if (local->pure_const_state == IPA_CONST)
352 	local->pure_const_state = IPA_PURE;
353     }
354 }
355 
356 /* compute state based on ECF FLAGS and store to STATE and LOOPING.  */
357 
358 static void
359 state_from_flags (enum pure_const_state_e *state, bool *looping,
360 	          int flags, bool cannot_lead_to_return)
361 {
362   *looping = false;
363   if (flags & ECF_LOOPING_CONST_OR_PURE)
364     {
365       *looping = true;
366       if (dump_file && (dump_flags & TDF_DETAILS))
367 	fprintf (dump_file, " looping");
368     }
369   if (flags & ECF_CONST)
370     {
371       *state = IPA_CONST;
372       if (dump_file && (dump_flags & TDF_DETAILS))
373 	fprintf (dump_file, " const\n");
374     }
375   else if (flags & ECF_PURE)
376     {
377       *state = IPA_PURE;
378       if (dump_file && (dump_flags & TDF_DETAILS))
379 	fprintf (dump_file, " pure\n");
380     }
381   else if (cannot_lead_to_return)
382     {
383       *state = IPA_PURE;
384       *looping = true;
385       if (dump_file && (dump_flags & TDF_DETAILS))
386 	fprintf (dump_file, " ignoring side effects->pure looping\n");
387     }
388   else
389     {
390       if (dump_file && (dump_flags & TDF_DETAILS))
391 	fprintf (dump_file, " neihter\n");
392       *state = IPA_NEITHER;
393       *looping = true;
394     }
395 }
396 
397 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
398    into STATE and LOOPING better of the two variants.
399    Be sure to merge looping correctly.  IPA_NEITHER functions
400    have looping 0 even if they don't have to return.  */
401 
402 static inline void
403 better_state (enum pure_const_state_e *state, bool *looping,
404 	      enum pure_const_state_e state2, bool looping2)
405 {
406   if (state2 < *state)
407     {
408       if (*state == IPA_NEITHER)
409 	*looping = looping2;
410       else
411 	*looping = MIN (*looping, looping2);
412     }
413   else if (state2 != IPA_NEITHER)
414     *looping = MIN (*looping, looping2);
415 }
416 
417 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
418    into STATE and LOOPING worse of the two variants.  */
419 
420 static inline void
421 worse_state (enum pure_const_state_e *state, bool *looping,
422 	     enum pure_const_state_e state2, bool looping2)
423 {
424   *state = MAX (*state, state2);
425   *looping = MAX (*looping, looping2);
426 }
427 
428 /* Recognize special cases of builtins that are by themselves not pure or const
429    but function using them is.  */
430 static bool
431 special_builtin_state (enum pure_const_state_e *state, bool *looping,
432 			tree callee)
433 {
434   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
435     switch (DECL_FUNCTION_CODE (callee))
436       {
437 	case BUILT_IN_RETURN:
438 	case BUILT_IN_UNREACHABLE:
439 	case BUILT_IN_ALLOCA:
440 	case BUILT_IN_ALLOCA_WITH_ALIGN:
441 	case BUILT_IN_STACK_SAVE:
442 	case BUILT_IN_STACK_RESTORE:
443 	case BUILT_IN_EH_POINTER:
444 	case BUILT_IN_EH_FILTER:
445 	case BUILT_IN_UNWIND_RESUME:
446 	case BUILT_IN_CXA_END_CLEANUP:
447 	case BUILT_IN_EH_COPY_VALUES:
448 	case BUILT_IN_FRAME_ADDRESS:
449 	case BUILT_IN_APPLY:
450 	case BUILT_IN_APPLY_ARGS:
451 	  *looping = false;
452 	  *state = IPA_CONST;
453 	  return true;
454 	case BUILT_IN_PREFETCH:
455 	  *looping = true;
456 	  *state = IPA_CONST;
457 	  return true;
458       }
459   return false;
460 }
461 
462 /* Check the parameters of a function call to CALL_EXPR to see if
463    there are any references in the parameters that are not allowed for
464    pure or const functions.  Also check to see if this is either an
465    indirect call, a call outside the compilation unit, or has special
466    attributes that may also effect the purity.  The CALL_EXPR node for
467    the entire call expression.  */
468 
469 static void
470 check_call (funct_state local, gimple call, bool ipa)
471 {
472   int flags = gimple_call_flags (call);
473   tree callee_t = gimple_call_fndecl (call);
474   bool possibly_throws = stmt_could_throw_p (call);
475   bool possibly_throws_externally = (possibly_throws
476   				     && stmt_can_throw_external (call));
477 
478   if (possibly_throws)
479     {
480       unsigned int i;
481       for (i = 0; i < gimple_num_ops (call); i++)
482         if (gimple_op (call, i)
483 	    && tree_could_throw_p (gimple_op (call, i)))
484 	  {
485 	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
486 	      {
487 		if (dump_file)
488 		  fprintf (dump_file, "    operand can throw; looping\n");
489 		local->looping = true;
490 	      }
491 	    if (possibly_throws_externally)
492 	      {
493 		if (dump_file)
494 		  fprintf (dump_file, "    operand can throw externally\n");
495 		local->can_throw = true;
496 	      }
497 	  }
498     }
499 
500   /* The const and pure flags are set by a variety of places in the
501      compiler (including here).  If someone has already set the flags
502      for the callee, (such as for some of the builtins) we will use
503      them, otherwise we will compute our own information.
504 
505      Const and pure functions have less clobber effects than other
506      functions so we process these first.  Otherwise if it is a call
507      outside the compilation unit or an indirect call we punt.  This
508      leaves local calls which will be processed by following the call
509      graph.  */
510   if (callee_t)
511     {
512       enum pure_const_state_e call_state;
513       bool call_looping;
514 
515       if (special_builtin_state (&call_state, &call_looping, callee_t))
516 	{
517 	  worse_state (&local->pure_const_state, &local->looping,
518 		       call_state, call_looping);
519 	  return;
520 	}
521       /* When bad things happen to bad functions, they cannot be const
522 	 or pure.  */
523       if (setjmp_call_p (callee_t))
524 	{
525 	  if (dump_file)
526 	    fprintf (dump_file, "    setjmp is not const/pure\n");
527           local->looping = true;
528 	  local->pure_const_state = IPA_NEITHER;
529 	}
530 
531       if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
532 	switch (DECL_FUNCTION_CODE (callee_t))
533 	  {
534 	  case BUILT_IN_LONGJMP:
535 	  case BUILT_IN_NONLOCAL_GOTO:
536 	    if (dump_file)
537 	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
538 	    local->pure_const_state = IPA_NEITHER;
539             local->looping = true;
540 	    break;
541 	  default:
542 	    break;
543 	  }
544     }
545 
546   /* When not in IPA mode, we can still handle self recursion.  */
547   if (!ipa && callee_t == current_function_decl)
548     {
549       if (dump_file)
550         fprintf (dump_file, "    Recursive call can loop.\n");
551       local->looping = true;
552     }
553   /* Either callee is unknown or we are doing local analysis.
554      Look to see if there are any bits available for the callee (such as by
555      declaration or because it is builtin) and process solely on the basis of
556      those bits. */
557   else if (!ipa)
558     {
559       enum pure_const_state_e call_state;
560       bool call_looping;
561       if (possibly_throws && cfun->can_throw_non_call_exceptions)
562         {
563 	  if (dump_file)
564 	    fprintf (dump_file, "    can throw; looping\n");
565           local->looping = true;
566 	}
567       if (possibly_throws_externally)
568         {
569 	  if (dump_file)
570 	    {
571 	      fprintf (dump_file, "    can throw externally to lp %i\n",
572 	      	       lookup_stmt_eh_lp (call));
573 	      if (callee_t)
574 		fprintf (dump_file, "     callee:%s\n",
575 			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
576 	    }
577           local->can_throw = true;
578 	}
579       if (dump_file && (dump_flags & TDF_DETAILS))
580 	fprintf (dump_file, "    checking flags for call:");
581       state_from_flags (&call_state, &call_looping, flags,
582 			((flags & (ECF_NORETURN | ECF_NOTHROW))
583 			 == (ECF_NORETURN | ECF_NOTHROW))
584 			|| (!flag_exceptions && (flags & ECF_NORETURN)));
585       worse_state (&local->pure_const_state, &local->looping,
586 		   call_state, call_looping);
587     }
588   /* Direct functions calls are handled by IPA propagation.  */
589 }
590 
591 /* Wrapper around check_decl for loads in local more.  */
592 
593 static bool
594 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
595 {
596   if (DECL_P (op))
597     check_decl ((funct_state)data, op, false, false);
598   else
599     check_op ((funct_state)data, op, false);
600   return false;
601 }
602 
603 /* Wrapper around check_decl for stores in local more.  */
604 
605 static bool
606 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
607 {
608   if (DECL_P (op))
609     check_decl ((funct_state)data, op, true, false);
610   else
611     check_op ((funct_state)data, op, true);
612   return false;
613 }
614 
615 /* Wrapper around check_decl for loads in ipa mode.  */
616 
617 static bool
618 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
619 {
620   if (DECL_P (op))
621     check_decl ((funct_state)data, op, false, true);
622   else
623     check_op ((funct_state)data, op, false);
624   return false;
625 }
626 
627 /* Wrapper around check_decl for stores in ipa mode.  */
628 
629 static bool
630 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
631 {
632   if (DECL_P (op))
633     check_decl ((funct_state)data, op, true, true);
634   else
635     check_op ((funct_state)data, op, true);
636   return false;
637 }
638 
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has: volatile operands, loads/stores, EH behavior, calls,
   nonlocal labels and asms.  LOCAL accumulates the state; IPA selects the
   IPA-mode load/store callbacks.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  /* Debug statements have no effect on purity.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Volatile operands poison the state; clobbers are only lifetime
     markers and are exempt.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store :  check_store);

  /* Non-call statements that may throw: internal EH makes the function
     possibly looping, externally visible EH makes it throwing.  Calls
     are handled by check_call below.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally");
	  local->can_throw = true;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump. */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
          local->looping = true;
	}
      return;
    default:
      break;
    }
}
719 
720 
721 /* This is the main routine for finding the reference patterns for
722    global variables within a function FN.  */
723 
724 static funct_state
725 analyze_function (struct cgraph_node *fn, bool ipa)
726 {
727   tree decl = fn->decl;
728   tree old_decl = current_function_decl;
729   funct_state l;
730   basic_block this_block;
731 
732   l = XCNEW (struct funct_state_d);
733   l->pure_const_state = IPA_CONST;
734   l->state_previously_known = IPA_NEITHER;
735   l->looping_previously_known = true;
736   l->looping = false;
737   l->can_throw = false;
738   state_from_flags (&l->state_previously_known, &l->looping_previously_known,
739 		    flags_from_decl_or_type (fn->decl),
740 		    cgraph_node_cannot_return (fn));
741 
742   if (fn->thunk.thunk_p || fn->alias)
743     {
744       /* Thunk gets propagated through, so nothing interesting happens.  */
745       gcc_assert (ipa);
746       return l;
747     }
748 
749   if (dump_file)
750     {
751       fprintf (dump_file, "\n\n local analysis of %s\n ",
752 	       cgraph_node_name (fn));
753     }
754 
755   push_cfun (DECL_STRUCT_FUNCTION (decl));
756   current_function_decl = decl;
757 
758   FOR_EACH_BB (this_block)
759     {
760       gimple_stmt_iterator gsi;
761       struct walk_stmt_info wi;
762 
763       memset (&wi, 0, sizeof(wi));
764       for (gsi = gsi_start_bb (this_block);
765 	   !gsi_end_p (gsi);
766 	   gsi_next (&gsi))
767 	{
768 	  check_stmt (&gsi, l, ipa);
769 	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
770 	    goto end;
771 	}
772     }
773 
774 end:
775   if (l->pure_const_state != IPA_NEITHER)
776     {
777       /* Const functions cannot have back edges (an
778 	 indication of possible infinite loop side
779 	 effect.  */
780       if (mark_dfs_back_edges ())
781         {
782 	  /* Preheaders are needed for SCEV to work.
783 	     Simple latches and recorded exits improve chances that loop will
784 	     proved to be finite in testcases such as in loop-15.c and loop-24.c  */
785 	  loop_optimizer_init (LOOPS_NORMAL
786 			       | LOOPS_HAVE_RECORDED_EXITS);
787 	  if (dump_file && (dump_flags & TDF_DETAILS))
788 	    flow_loops_dump (dump_file, NULL, 0);
789 	  if (mark_irreducible_loops ())
790 	    {
791 	      if (dump_file)
792 	        fprintf (dump_file, "    has irreducible loops\n");
793 	      l->looping = true;
794 	    }
795 	  else
796 	    {
797 	      loop_iterator li;
798 	      struct loop *loop;
799 	      scev_initialize ();
800 	      FOR_EACH_LOOP (li, loop, 0)
801 		if (!finite_loop_p (loop))
802 		  {
803 		    if (dump_file)
804 		      fprintf (dump_file, "    can not prove finiteness of loop %i\n", loop->num);
805 		    l->looping =true;
806 		    FOR_EACH_LOOP_BREAK (li);
807 		  }
808 	      scev_finalize ();
809 	    }
810           loop_optimizer_finalize ();
811 	}
812     }
813 
814   if (dump_file && (dump_flags & TDF_DETAILS))
815     fprintf (dump_file, "    checking previously known:");
816 
817   better_state (&l->pure_const_state, &l->looping,
818 		l->state_previously_known,
819 		l->looping_previously_known);
820   if (TREE_NOTHROW (decl))
821     l->can_throw = false;
822 
823   pop_cfun ();
824   current_function_decl = old_decl;
825   if (dump_file)
826     {
827       if (l->looping)
828         fprintf (dump_file, "Function is locally looping.\n");
829       if (l->can_throw)
830         fprintf (dump_file, "Function is locally throwing.\n");
831       if (l->pure_const_state == IPA_CONST)
832         fprintf (dump_file, "Function is locally const.\n");
833       if (l->pure_const_state == IPA_PURE)
834         fprintf (dump_file, "Function is locally pure.\n");
835     }
836   return l;
837 }
838 
/* Called when new function is inserted to callgraph late.  Analyzes it
   so the pass has state for functions created after generate_summary.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
   return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  /* NOTE(review): this guard uses '>' while generate_summary analyzes
     nodes with availability '>=' AVAIL_OVERWRITABLE, so a node that is
     exactly overwritable gets no state here -- confirm the asymmetry
     is intended.  */
  if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
    set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
855 
856 /* Called when new clone is inserted to callgraph late.  */
857 
858 static void
859 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
860 	 	     void *data ATTRIBUTE_UNUSED)
861 {
862   if (has_function_state (src))
863     {
864       funct_state l = XNEW (struct funct_state_d);
865       gcc_assert (!has_function_state (dst));
866       memcpy (l, get_function_state (src), sizeof (*l));
867       set_function_state (dst, l);
868     }
869 }
870 
871 /* Called when new clone is inserted to callgraph late.  */
872 
873 static void
874 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
875 {
876   if (has_function_state (node))
877     {
878       funct_state l = get_function_state (node);
879       if (l != &varying_state)
880         free (l);
881       set_function_state (node, NULL);
882     }
883 }
884 
885 
886 static void
887 register_hooks (void)
888 {
889   static bool init_p = false;
890 
891   if (init_p)
892     return;
893 
894   init_p = true;
895 
896   node_removal_hook_holder =
897       cgraph_add_node_removal_hook (&remove_node_data, NULL);
898   node_duplication_hook_holder =
899       cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
900   function_insertion_hook_holder =
901       cgraph_add_function_insertion_hook (&add_new_function, NULL);
902 }
903 
904 
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  Entry point for the summary-generation phase of the pass.  */

static void
generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  for (node = cgraph_nodes; node; node = node->next)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
934 
935 
/* Serialize the ipa info for lto.  Writes, for each analyzed function
   in SET, a node reference followed by a bitpack of its funct_state.
   The bitpack field order here must match the read order in
   pure_const_read_summary.  */

static void
pure_const_write_summary (cgraph_node_set set,
			  varpool_node_set vset ATTRIBUTE_UNUSED)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_cgraph_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->cgraph_node_encoder;
	  node_ref = lto_cgraph_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
988 
989 
990 /* Deserialize the ipa info for lto.  */
991 
992 static void
993 pure_const_read_summary (void)
994 {
995   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
996   struct lto_file_decl_data *file_data;
997   unsigned int j = 0;
998 
999   register_hooks ();
1000   while ((file_data = file_data_vec[j++]))
1001     {
1002       const char *data;
1003       size_t len;
1004       struct lto_input_block *ib
1005 	= lto_create_simple_input_block (file_data,
1006 					 LTO_section_ipa_pure_const,
1007 					 &data, &len);
1008       if (ib)
1009 	{
1010 	  unsigned int i;
1011 	  unsigned int count = streamer_read_uhwi (ib);
1012 
1013 	  for (i = 0; i < count; i++)
1014 	    {
1015 	      unsigned int index;
1016 	      struct cgraph_node *node;
1017 	      struct bitpack_d bp;
1018 	      funct_state fs;
1019 	      lto_cgraph_encoder_t encoder;
1020 
1021 	      fs = XCNEW (struct funct_state_d);
1022 	      index = streamer_read_uhwi (ib);
1023 	      encoder = file_data->cgraph_node_encoder;
1024 	      node = lto_cgraph_encoder_deref (encoder, index);
1025 	      set_function_state (node, fs);
1026 
1027 	      /* Note that the flags must be read in the opposite
1028 		 order in which they were written (the bitflags were
1029 		 pushed into FLAGS).  */
1030 	      bp = streamer_read_bitpack (ib);
1031 	      fs->pure_const_state
1032 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1033 	      fs->state_previously_known
1034 			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1035 	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
1036 	      fs->looping = bp_unpack_value (&bp, 1);
1037 	      fs->can_throw = bp_unpack_value (&bp, 1);
1038 	      if (dump_file)
1039 		{
1040 		  int flags = flags_from_decl_or_type (node->decl);
1041 		  fprintf (dump_file, "Read info for %s/%i ",
1042 			   cgraph_node_name (node),
1043 			   node->uid);
1044 		  if (flags & ECF_CONST)
1045 		    fprintf (dump_file, " const");
1046 		  if (flags & ECF_PURE)
1047 		    fprintf (dump_file, " pure");
1048 		  if (flags & ECF_NOTHROW)
1049 		    fprintf (dump_file, " nothrow");
1050 		  fprintf (dump_file, "\n  pure const state: %s\n",
1051 			   pure_const_names[fs->pure_const_state]);
1052 		  fprintf (dump_file, "  previously known state: %s\n",
1053 			   pure_const_names[fs->looping_previously_known]);
1054 		  if (fs->looping)
1055 		    fprintf (dump_file,"  function is locally looping\n");
1056 		  if (fs->looping_previously_known)
1057 		    fprintf (dump_file,"  function is previously known looping\n");
1058 		  if (fs->can_throw)
1059 		    fprintf (dump_file,"  function is locally throwing\n");
1060 		}
1061 	    }
1062 
1063 	  lto_destroy_simple_input_block (file_data,
1064 					  LTO_section_ipa_pure_const,
1065 					  ib, data, len);
1066 	}
1067     }
1068 }
1069 
1070 
1071 static bool
1072 ignore_edge (struct cgraph_edge *e)
1073 {
1074   return (!e->can_throw_external);
1075 }
1076 
1077 /* Return true if NODE is self recursive function.
1078    ??? self recursive and indirectly recursive funcions should
1079    be the same, so this function seems unnecesary.  */
1080 
1081 static bool
1082 self_recursive_p (struct cgraph_node *node)
1083 {
1084   struct cgraph_edge *e;
1085   for (e = node->callees; e; e = e->next_callee)
1086     if (cgraph_function_node (e->callee, NULL) == node)
1087       return true;
1088   return false;
1089 }
1090 
1091 /* Produce transitive closure over the callgraph and compute pure/const
1092    attributes.  */
1093 
1094 static void
1095 propagate_pure_const (void)
1096 {
1097   struct cgraph_node *node;
1098   struct cgraph_node *w;
1099   struct cgraph_node **order =
1100     XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1101   int order_pos;
1102   int i;
1103   struct ipa_dfs_info * w_info;
1104 
1105   order_pos = ipa_reduced_postorder (order, true, false, NULL);
1106   if (dump_file)
1107     {
1108       dump_cgraph (dump_file);
1109       ipa_print_order(dump_file, "reduced", order, order_pos);
1110     }
1111 
1112   /* Propagate the local information thru the call graph to produce
1113      the global information.  All the nodes within a cycle will have
1114      the same info so we collapse cycles first.  Then we can do the
1115      propagation in one pass from the leaves to the roots.  */
1116   for (i = 0; i < order_pos; i++ )
1117     {
1118       enum pure_const_state_e pure_const_state = IPA_CONST;
1119       bool looping = false;
1120       int count = 0;
1121       node = order[i];
1122 
1123       if (node->alias)
1124 	continue;
1125 
1126       if (dump_file && (dump_flags & TDF_DETAILS))
1127 	fprintf (dump_file, "Starting cycle\n");
1128 
1129       /* Find the worst state for any node in the cycle.  */
1130       w = node;
1131       while (w && pure_const_state != IPA_NEITHER)
1132 	{
1133 	  struct cgraph_edge *e;
1134 	  struct cgraph_edge *ie;
1135 	  int i;
1136 	  struct ipa_ref *ref;
1137 
1138 	  funct_state w_l = get_function_state (w);
1139 	  if (dump_file && (dump_flags & TDF_DETAILS))
1140 	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
1141 		     cgraph_node_name (w),
1142 		     w->uid,
1143 		     pure_const_names[w_l->pure_const_state],
1144 		     w_l->looping);
1145 
1146 	  /* First merge in function body properties.  */
1147 	  worse_state (&pure_const_state, &looping,
1148 		       w_l->pure_const_state, w_l->looping);
1149 	  if (pure_const_state == IPA_NEITHER)
1150 	    break;
1151 
1152 	  /* For overwritable nodes we can not assume anything.  */
1153 	  if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1154 	    {
1155 	      worse_state (&pure_const_state, &looping,
1156 			   w_l->state_previously_known,
1157 			   w_l->looping_previously_known);
1158 	      if (dump_file && (dump_flags & TDF_DETAILS))
1159 		{
1160 		  fprintf (dump_file,
1161 			   "    Overwritable. state %s looping %i\n",
1162 			   pure_const_names[w_l->state_previously_known],
1163 			   w_l->looping_previously_known);
1164 		}
1165 	      break;
1166 	    }
1167 
1168 	  count++;
1169 
1170 	  /* We consider recursive cycles as possibly infinite.
1171 	     This might be relaxed since infinite recursion leads to stack
1172 	     overflow.  */
1173 	  if (count > 1)
1174 	    looping = true;
1175 
1176 	  /* Now walk the edges and merge in callee properties.  */
1177 	  for (e = w->callees; e; e = e->next_callee)
1178 	    {
1179 	      enum availability avail;
1180 	      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
1181 	      enum pure_const_state_e edge_state = IPA_CONST;
1182 	      bool edge_looping = false;
1183 
1184 	      if (dump_file && (dump_flags & TDF_DETAILS))
1185 		{
1186 		  fprintf (dump_file,
1187 			   "    Call to %s/%i",
1188 			   cgraph_node_name (e->callee),
1189 			   e->callee->uid);
1190 		}
1191 	      if (avail > AVAIL_OVERWRITABLE)
1192 		{
1193 		  funct_state y_l = get_function_state (y);
1194 		  if (dump_file && (dump_flags & TDF_DETAILS))
1195 		    {
1196 		      fprintf (dump_file,
1197 			       " state:%s looping:%i\n",
1198 			       pure_const_names[y_l->pure_const_state],
1199 			       y_l->looping);
1200 		    }
1201 		  if (y_l->pure_const_state > IPA_PURE
1202 		      && cgraph_edge_cannot_lead_to_return (e))
1203 		    {
1204 		      if (dump_file && (dump_flags & TDF_DETAILS))
1205 			fprintf (dump_file,
1206 				 "        Ignoring side effects"
1207 				 " -> pure, looping\n");
1208 		      edge_state = IPA_PURE;
1209 		      edge_looping = true;
1210 		    }
1211 		  else
1212 		    {
1213 		      edge_state = y_l->pure_const_state;
1214 		      edge_looping = y_l->looping;
1215 		    }
1216 		}
1217 	      else if (special_builtin_state (&edge_state, &edge_looping,
1218 					       y->decl))
1219 		;
1220 	      else
1221 		state_from_flags (&edge_state, &edge_looping,
1222 				  flags_from_decl_or_type (y->decl),
1223 				  cgraph_edge_cannot_lead_to_return (e));
1224 
1225 	      /* Merge the results with what we already know.  */
1226 	      better_state (&edge_state, &edge_looping,
1227 			    w_l->state_previously_known,
1228 			    w_l->looping_previously_known);
1229 	      worse_state (&pure_const_state, &looping,
1230 			   edge_state, edge_looping);
1231 	      if (pure_const_state == IPA_NEITHER)
1232 	        break;
1233 	    }
1234 	  if (pure_const_state == IPA_NEITHER)
1235 	    break;
1236 
1237 	  /* Now process the indirect call.  */
1238           for (ie = w->indirect_calls; ie; ie = ie->next_callee)
1239 	    {
1240 	      enum pure_const_state_e edge_state = IPA_CONST;
1241 	      bool edge_looping = false;
1242 
1243 	      if (dump_file && (dump_flags & TDF_DETAILS))
1244 		fprintf (dump_file, "    Indirect call");
1245 	      state_from_flags (&edge_state, &edge_looping,
1246 			        ie->indirect_info->ecf_flags,
1247 			        cgraph_edge_cannot_lead_to_return (ie));
1248 	      /* Merge the results with what we already know.  */
1249 	      better_state (&edge_state, &edge_looping,
1250 			    w_l->state_previously_known,
1251 			    w_l->looping_previously_known);
1252 	      worse_state (&pure_const_state, &looping,
1253 			   edge_state, edge_looping);
1254 	      if (pure_const_state == IPA_NEITHER)
1255 	        break;
1256 	    }
1257 	  if (pure_const_state == IPA_NEITHER)
1258 	    break;
1259 
1260 	  /* And finally all loads and stores.  */
1261 	  for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
1262 	    {
1263 	      enum pure_const_state_e ref_state = IPA_CONST;
1264 	      bool ref_looping = false;
1265 	      switch (ref->use)
1266 		{
1267 		case IPA_REF_LOAD:
1268 		  /* readonly reads are safe.  */
1269 		  if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
1270 		    break;
1271 		  if (dump_file && (dump_flags & TDF_DETAILS))
1272 		    fprintf (dump_file, "    nonreadonly global var read\n");
1273 		  ref_state = IPA_PURE;
1274 		  break;
1275 		case IPA_REF_STORE:
1276 		  if (ipa_ref_cannot_lead_to_return (ref))
1277 		    break;
1278 		  ref_state = IPA_NEITHER;
1279 		  if (dump_file && (dump_flags & TDF_DETAILS))
1280 		    fprintf (dump_file, "    global var write\n");
1281 		  break;
1282 		case IPA_REF_ADDR:
1283 		  break;
1284 		}
1285 	      better_state (&ref_state, &ref_looping,
1286 			    w_l->state_previously_known,
1287 			    w_l->looping_previously_known);
1288 	      worse_state (&pure_const_state, &looping,
1289 			   ref_state, ref_looping);
1290 	      if (pure_const_state == IPA_NEITHER)
1291 		break;
1292 	    }
1293 	  w_info = (struct ipa_dfs_info *) w->aux;
1294 	  w = w_info->next_cycle;
1295 	}
1296       if (dump_file && (dump_flags & TDF_DETAILS))
1297 	fprintf (dump_file, "Result %s looping %i\n",
1298 		 pure_const_names [pure_const_state],
1299 		 looping);
1300 
1301       /* Copy back the region's pure_const_state which is shared by
1302 	 all nodes in the region.  */
1303       w = node;
1304       while (w)
1305 	{
1306 	  funct_state w_l = get_function_state (w);
1307 	  enum pure_const_state_e this_state = pure_const_state;
1308 	  bool this_looping = looping;
1309 
1310 	  if (w_l->state_previously_known != IPA_NEITHER
1311 	      && this_state > w_l->state_previously_known)
1312 	    {
1313               this_state = w_l->state_previously_known;
1314 	      this_looping |= w_l->looping_previously_known;
1315 	    }
1316 	  if (!this_looping && self_recursive_p (w))
1317 	    this_looping = true;
1318 	  if (!w_l->looping_previously_known)
1319 	    this_looping = false;
1320 
1321 	  /* All nodes within a cycle share the same info.  */
1322 	  w_l->pure_const_state = this_state;
1323 	  w_l->looping = this_looping;
1324 
1325 	  switch (this_state)
1326 	    {
1327 	    case IPA_CONST:
1328 	      if (!TREE_READONLY (w->decl))
1329 		{
1330 		  warn_function_const (w->decl, !this_looping);
1331 		  if (dump_file)
1332 		    fprintf (dump_file, "Function found to be %sconst: %s\n",
1333 			     this_looping ? "looping " : "",
1334 			     cgraph_node_name (w));
1335 		}
1336 	      cgraph_set_const_flag (w, true, this_looping);
1337 	      break;
1338 
1339 	    case IPA_PURE:
1340 	      if (!DECL_PURE_P (w->decl))
1341 		{
1342 		  warn_function_pure (w->decl, !this_looping);
1343 		  if (dump_file)
1344 		    fprintf (dump_file, "Function found to be %spure: %s\n",
1345 			     this_looping ? "looping " : "",
1346 			     cgraph_node_name (w));
1347 		}
1348 	      cgraph_set_pure_flag (w, true, this_looping);
1349 	      break;
1350 
1351 	    default:
1352 	      break;
1353 	    }
1354 	  w_info = (struct ipa_dfs_info *) w->aux;
1355 	  w = w_info->next_cycle;
1356 	}
1357     }
1358 
1359   ipa_free_postorder_info ();
1360   free (order);
1361 }
1362 
1363 /* Produce transitive closure over the callgraph and compute nothrow
1364    attributes.  */
1365 
1366 static void
1367 propagate_nothrow (void)
1368 {
1369   struct cgraph_node *node;
1370   struct cgraph_node *w;
1371   struct cgraph_node **order =
1372     XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1373   int order_pos;
1374   int i;
1375   struct ipa_dfs_info * w_info;
1376 
1377   order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
1378   if (dump_file)
1379     {
1380       dump_cgraph (dump_file);
1381       ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1382     }
1383 
1384   /* Propagate the local information thru the call graph to produce
1385      the global information.  All the nodes within a cycle will have
1386      the same info so we collapse cycles first.  Then we can do the
1387      propagation in one pass from the leaves to the roots.  */
1388   for (i = 0; i < order_pos; i++ )
1389     {
1390       bool can_throw = false;
1391       node = order[i];
1392 
1393       if (node->alias)
1394 	continue;
1395 
1396       /* Find the worst state for any node in the cycle.  */
1397       w = node;
1398       while (w)
1399 	{
1400 	  struct cgraph_edge *e, *ie;
1401 	  funct_state w_l = get_function_state (w);
1402 
1403 	  if (w_l->can_throw
1404 	      || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1405 	    can_throw = true;
1406 
1407 	  if (can_throw)
1408 	    break;
1409 
1410 	  for (e = w->callees; e; e = e->next_callee)
1411 	    {
1412 	      enum availability avail;
1413 	      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
1414 
1415 	      if (avail > AVAIL_OVERWRITABLE)
1416 		{
1417 		  funct_state y_l = get_function_state (y);
1418 
1419 		  if (can_throw)
1420 		    break;
1421 		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1422 		      && e->can_throw_external)
1423 		    can_throw = true;
1424 		}
1425 	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1426 	        can_throw = true;
1427 	    }
1428           for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1429 	    if (ie->can_throw_external)
1430 	      can_throw = true;
1431 	  w_info = (struct ipa_dfs_info *) w->aux;
1432 	  w = w_info->next_cycle;
1433 	}
1434 
1435       /* Copy back the region's pure_const_state which is shared by
1436 	 all nodes in the region.  */
1437       w = node;
1438       while (w)
1439 	{
1440 	  funct_state w_l = get_function_state (w);
1441 	  if (!can_throw && !TREE_NOTHROW (w->decl))
1442 	    {
1443 	      cgraph_set_nothrow_flag (w, true);
1444 	      if (dump_file)
1445 		fprintf (dump_file, "Function found to be nothrow: %s\n",
1446 			 cgraph_node_name (w));
1447 	    }
1448 	  else if (can_throw && !TREE_NOTHROW (w->decl))
1449 	    w_l->can_throw = true;
1450 	  w_info = (struct ipa_dfs_info *) w->aux;
1451 	  w = w_info->next_cycle;
1452 	}
1453     }
1454 
1455   ipa_free_postorder_info ();
1456   free (order);
1457 }
1458 
1459 
1460 /* Produce the global information by preforming a transitive closure
1461    on the local information that was produced by generate_summary.  */
1462 
1463 static unsigned int
1464 propagate (void)
1465 {
1466   struct cgraph_node *node;
1467 
1468   cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1469   cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1470   cgraph_remove_node_removal_hook (node_removal_hook_holder);
1471 
1472   /* Nothrow makes more function to not lead to return and improve
1473      later analysis.  */
1474   propagate_nothrow ();
1475   propagate_pure_const ();
1476 
1477   /* Cleanup. */
1478   for (node = cgraph_nodes; node; node = node->next)
1479     if (has_function_state (node))
1480       free (get_function_state (node));
1481   VEC_free (funct_state, heap, funct_state_vec);
1482   finish_state ();
1483   return 0;
1484 }
1485 
1486 static bool
1487 gate_pure_const (void)
1488 {
1489   return (flag_ipa_pure_const
1490 	  /* Don't bother doing anything if the program has errors.  */
1491 	  && !seen_error ());
1492 }
1493 
/* The IPA pass descriptor: generate_summary computes the local
   information, the write/read hooks stream it for LTO, and propagate
   performs the interprocedural closure.  */

struct ipa_opt_pass_d pass_ipa_pure_const =
{
 {
  IPA_PASS,
  "pure-const",		                /* name */
  gate_pure_const,			/* gate */
  propagate,			        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 },
 generate_summary,		        /* generate_summary */
 pure_const_write_summary,		/* write_summary */
 pure_const_read_summary,		/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,			                /* function_transform */
 NULL					/* variable_transform */
};
1521 
1522 /* Return true if function should be skipped for local pure const analysis.  */
1523 
1524 static bool
1525 skip_function_for_local_pure_const (struct cgraph_node *node)
1526 {
1527   /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1528      we must not promote functions that are called by already processed functions.  */
1529 
1530   if (function_called_by_processed_nodes_p ())
1531     {
1532       if (dump_file)
1533         fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1534       return true;
1535     }
1536   if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1537     {
1538       if (dump_file)
1539         fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1540       return true;
1541     }
1542   return false;
1543 }
1544 
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.   This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

static unsigned int
local_pure_const (void)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_get_node (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  /* If the result can not be used and no attribute suggestions were
     requested, there is nothing to do.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* Compute the local pure/const/nothrow summary for this function.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  A function whose exit block has no
     predecessors never returns.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    {
      warn_function_noreturn (cfun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
	         lang_hooks.decl_printable_name (current_function_decl, 2));

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     lang_hooks.decl_printable_name (current_function_decl,
						     2));
	}
      /* Already known const; we may still be able to drop the
	 looping bit.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     lang_hooks.decl_printable_name (current_function_decl,
						     2));
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     lang_hooks.decl_printable_name (current_function_decl,
						     2));
	}
      /* Already known pure; we may still be able to drop the
	 looping bit.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     lang_hooks.decl_printable_name (current_function_decl,
						     2));
	}
      break;

    default:
      break;
    }
  /* NOTHROW discovery.  Note this is applied even when SKIP is set.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      cgraph_set_nothrow_flag (node, true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 lang_hooks.decl_printable_name (current_function_decl,
						 2));
    }
  free (l);
  /* Flag changes may have invalidated the CFG; clean it up.  */
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1663 
/* The per-function (GIMPLE) pass descriptor for the early, local
   variant of the analysis; shares the gate with the IPA pass.  */

struct gimple_opt_pass pass_local_pure_const =
{
 {
  GIMPLE_PASS,
  "local-pure-const",	                /* name */
  gate_pure_const,			/* gate */
  local_pure_const,		        /* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PURE_CONST,		        /* tv_id */
  0,	                                /* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1682