/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
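
/* For example (purely illustrative), given

     int sq (int x) { return x * x; }
     int g;
     int read_g (void) { return g; }
     void set_g (int x) { g = x; }

   SQ depends only on its arguments and so is a candidate for const,
   READ_G additionally reads global memory and is only a candidate
   for pure, and SET_G writes global memory and is neither.  */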

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

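/* Lattice for the malloc attribute.  Functions start in the optimistic
   TOP state, drop to MALLOC when they are seen to return only freshly
   allocated, uncaptured pointers, and fall to BOTTOM once that can no
   longer be shown (this reading of the lattice follows its use in
   malloc_candidate_p and the propagation code below).  */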
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[]
  = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user declared; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

/* State used when we know nothing about a function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}

/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

/* Emit a suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 false, warned_about, "malloc");
}

/* Emit a suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

/* Emit a suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to
     release it before it is overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}


/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of the memory operand T is legal in a function that is either pure
   or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
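
/* For example (purely illustrative), in

     int f (int i)
     {
       int buf[4] = { 0, 1, 2, 3 };
       int *p = &buf[0];
       return p[i & 3];
     }

   the indirect load through P is from local memory that never escapes,
   so check_op leaves the state alone; the same load through a pointer
   that may alias global memory would instead drop a const candidate
   to pure.  */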

/* Compute the state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
	          int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
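
/* For example (purely illustrative), a callee declared as

     extern void fatal_error (const char *) __attribute__ ((noreturn));

   carries neither ECF_CONST nor ECF_PURE, but when the call cannot
   lead to a return, state_from_flags yields IPA_PURE with LOOPING set,
   so the call's side effects may be ignored on such paths.  */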

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   better of the two variants into STATE and LOOPING.  Be careful to
   merge LOOPING correctly: IPA_NEITHER functions have LOOPING 0 even
   if they do not have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
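
/* For example, merging a body-derived (IPA_PURE, looping) with a
   previously known (IPA_CONST, !looping) yields (IPA_CONST, !looping),
   while merging when the current state is IPA_NEITHER adopts the
   better variant wholesale, including its looping flag.  */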

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   worse of the two variants into STATE and LOOPING.  FROM and TO are
   the symbols of the caller and the callee, used to check whether the
   callee may be interposed.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST, but when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure
   or const, but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
			tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	case BUILT_IN_RETURN:
	case BUILT_IN_UNREACHABLE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_EH_POINTER:
	case BUILT_IN_EH_FILTER:
	case BUILT_IN_UNWIND_RESUME:
	case BUILT_IN_CXA_END_CLEANUP:
	case BUILT_IN_EH_COPY_VALUES:
	case BUILT_IN_FRAME_ADDRESS:
	case BUILT_IN_APPLY_ARGS:
	case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
	case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	  *looping = false;
	  *state = IPA_CONST;
	  return true;
	case BUILT_IN_PREFETCH:
	  *looping = true;
	  *state = IPA_CONST;
	  return true;
	default:
	  break;
      }
  return false;
}
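
/* For example (purely illustrative), a function such as

     int
     sum3 (int a, int b, int c)
     {
       int *tmp = (int *) __builtin_alloca (3 * sizeof (int));
       tmp[0] = a; tmp[1] = b; tmp[2] = c;
       return tmp[0] + tmp[1] + tmp[2];
     }

   calls alloca, which is not const by itself, yet the call is treated
   as IPA_CONST above, so it does not by itself disqualify SUM3 from
   being detected as const.  */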

/* Check the parameters of the function call CALL to see if there are
   any references in the parameters that are not allowed for pure or
   const functions.  Also check to see if this is either an indirect
   call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them; otherwise we will compute our own information.

     Const and pure functions have fewer clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
            local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Always handle internal calls; those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
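
/* For example (purely illustrative), in GIMPLE resembling

     p_1 = foo ();
     if (p_1 == 0B)
       goto <bail>;
     ...
     return p_1;

   the only uses of P_1 are the comparison against 0 and the return
   statement itself, so check_retval_uses returns true.  */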

/* malloc_candidate_p () checks if FUN can possibly be annotated with
   the malloc attribute.  Currently this function does a very
   conservative analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT (element) is a function call.
   3) The return value has immediate uses only within comparisons
      (gcond or gassign) and the return stmt (and likewise a phi arg
      has immediate uses only within a comparison or the phi stmt).  */
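
/* For example (purely illustrative), a function such as

     void *
     alloc2 (size_t n, int zero)
     {
       return zero ? calloc (1, n) : malloc (n);
     }

   returns a phi whose arguments are results of malloc-like calls and
   are used only by the phi, so ALLOC2 is a candidate.  */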

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "%s", (reason));  \
  return false;  \
}

  if (EDGE_COUNT (exit_block->preds) == 0)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      gcc_assert (es);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    /* A NULL pointer flowing into the phi is compatible with
	       the malloc attribute; skip it here, since it is not an
	       SSA_NAME and the check below would otherwise reject it.  */
	    if (arg == null_pointer_node)
	      continue;
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
	    if (!check_retval_uses (arg, phi))
	      DUMP_AND_RETURN("phi arg has uses outside phi"
			      " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      return false;
	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			      " non-ipa mode.")

	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get (cs);
		gcc_assert (es);
		es->is_return_callee_uncaptured = true;
	      }
	  }

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunks get propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of
	 possible infinite loop side effects).  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that the
	     loop will be proved to be finite in testcases such as
	     loop-15.c and loop-24.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
          loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self-recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
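
/* For example (purely illustrative),

     int fact (int n) { return n > 1 ? n * fact (n - 1) : 1; }

   is self-recursive, while two functions that call each other form a
   non-trivial strongly connected component and are handled by the
   cycle walk in propagate_pure_const below.  */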

/* Return true if N is a cdtor that is not const or pure.  In this case
   we may need to remove an unreachable function if it is marked
   const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* We only propagate across edges with a non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}


/* Produce the transitive closure over the callgraph and compute
   pure/const attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }
1563 	  /* Now process the indirect call.  */
1564           for (ie = w->indirect_calls;
1565 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1566 	    {
1567 	      enum pure_const_state_e edge_state = IPA_CONST;
1568 	      bool edge_looping = false;
1569 
1570 	      if (dump_file && (dump_flags & TDF_DETAILS))
1571 		fprintf (dump_file, "    Indirect call");
1572 	      state_from_flags (&edge_state, &edge_looping,
1573 			        ie->indirect_info->ecf_flags,
1574 				ie->cannot_lead_to_return_p ());
1575 	      /* Merge the results with what we already know.  */
1576 	      better_state (&edge_state, &edge_looping,
1577 			    w_l->state_previously_known,
1578 			    w_l->looping_previously_known);
1579 	      worse_state (&pure_const_state, &looping,
1580 			   edge_state, edge_looping, NULL, NULL);
1581 	      if (pure_const_state == IPA_NEITHER)
1582 	        break;
1583 	    }
1584 
1585 	  /* And finally all loads and stores.  */
1586 	  for (i = 0; w->iterate_reference (i, ref)
1587 	       && pure_const_state != IPA_NEITHER; i++)
1588 	    {
1589 	      enum pure_const_state_e ref_state = IPA_CONST;
1590 	      bool ref_looping = false;
1591 	      switch (ref->use)
1592 		{
1593 		case IPA_REF_LOAD:
1594 		  /* readonly reads are safe.  */
1595 		  if (TREE_READONLY (ref->referred->decl))
1596 		    break;
1597 		  if (dump_file && (dump_flags & TDF_DETAILS))
1598 		    fprintf (dump_file, "    nonreadonly global var read\n");
1599 		  ref_state = IPA_PURE;
1600 		  break;
1601 		case IPA_REF_STORE:
1602 		  if (ref->cannot_lead_to_return ())
1603 		    break;
1604 		  ref_state = IPA_NEITHER;
1605 		  if (dump_file && (dump_flags & TDF_DETAILS))
1606 		    fprintf (dump_file, "    global var write\n");
1607 		  break;
1608 		case IPA_REF_ADDR:
1609 		case IPA_REF_CHKP:
1610 		  break;
1611 		default:
1612 		  gcc_unreachable ();
1613 		}
1614 	      better_state (&ref_state, &ref_looping,
1615 			    w_l->state_previously_known,
1616 			    w_l->looping_previously_known);
1617 	      worse_state (&pure_const_state, &looping,
1618 			   ref_state, ref_looping, NULL, NULL);
1619 	      if (pure_const_state == IPA_NEITHER)
1620 		break;
1621 	    }
1622 	  w_info = (struct ipa_dfs_info *) w->aux;
1623 	  w = w_info->next_cycle;
1624 	}
1625       if (dump_file && (dump_flags & TDF_DETAILS))
1626 	fprintf (dump_file, "Result %s looping %i\n",
1627 		 pure_const_names [pure_const_state],
1628 		 looping);
1629 
1630       /* Find the worst state of can_free for any node in the cycle.  */
1631       bool can_free = false;
1632       w = node;
1633       while (w && !can_free)
1634 	{
1635 	  struct cgraph_edge *e;
1636 	  funct_state w_l = get_function_state (w);
1637 
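	  /* An interposable body may be replaced by one that calls free,
	     and indirect calls may reach free as well, so be conservative
	     about both.  */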
	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning a constructor or destructor into a non-looping
		   const/pure function enables us to possibly remove it
		   completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */
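
/* For instance, for a hypothetical pair of functions

     void f (void) { }
     void g (void) { f (); }

   the local analysis proves F nothrow, and the propagation below then
   lets G be marked nothrow as well, since its only call cannot throw.  */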

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.  When the callee is compiled with
		     non-call exceptions, we must also check that the
		     declaration is bound to the current body, as another
		     semantically equivalent body may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's can_throw state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Debugging function to dump the state of the malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}

/* Propagate the malloc attribute across the callgraph.  */
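
/* For example, a hypothetical wrapper such as

     void *xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   returns the uncaptured result of a malloc-like callee on every return
   path; since malloc itself is known malloc (DECL_IS_MALLOC), the
   propagation below keeps the wrapper at STATE_MALLOC and the malloc
   attribute can be set on it.  */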

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!has_function_state (node))
	  {
	    funct_state l = XCNEW (struct funct_state_d);
	    *l = varying_state;
	    l->malloc_state = STATE_MALLOC;
	    set_function_state (node, l);
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !has_function_state (node))
	    continue;

	  funct_state l = get_function_state (node);

	  /* FIXME: add support for indirect calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!has_function_state (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state
		= get_function_state (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
	funct_state l = get_function_state (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return and improves
     the later analyses.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if the function should be skipped for local pure-const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations, we must not promote functions that are
     called by already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable
     and do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure/const discovery, reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   the other optimization passes in the early optimization pass queue.  */
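
/* As a hypothetical example, in

     static int sq (int x) { return x * x; }
     int sum (int x) { return sq (x) + sq (x); }

   the local pass can mark SQ const while early optimizations are still
   running, allowing the rest of the queue to CSE the two calls in SUM.  */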

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
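  /* If the exit block has no incoming edges, no execution path reaches
     a return, so the function never returns normally.  */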
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */
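
/* For example, with -Wsuggest-attribute=noreturn, a hypothetical

     void die (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   leaves the exit block unreachable and is diagnosed as a candidate
   for __attribute__ ((noreturn)).  */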

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery, reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   the other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
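	    /* A self-recursive call throws only if this function can
	       throw; if everything else turns out to be nothrow, such
	       calls cannot throw either, so skip them here and purge
	       their EH edges at the end.  */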
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}