/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};
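
/* For illustration only (not part of the pass), a sketch of where
   typical functions land on this lattice:

     int sq (int x) { return x * x; }   // const: reads no memory
     int g;
     int rd (void) { return g; }        // pure: reads global memory
     void wr (int x) { g = x; }         // neither: writes global memory  */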

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
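
/* For illustration only (not part of the pass): STATE_MALLOC_TOP is the
   optimistic starting value used during propagation, STATE_MALLOC means
   the function behaves like malloc (returns fresh, uncaptured memory),
   and STATE_MALLOC_BOTTOM means it is not known to do so.  E.g.

     void *wrap (__SIZE_TYPE__ n) { return malloc (n); }   // malloc
     void *id (void *p) { return p; }                      // bottom  */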

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* True if the function can call free, munmap or otherwise make
     previously non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
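
/* For illustration only (not part of the pass), how check_decl degrades
   the lattice for some hypothetical globals:

     const int ro;       // read: state unchanged, still const
     int rw;             // read: const drops to pure; write: neither
     volatile int vl;    // any access: neither  */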


/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of memory reference T is legal in a function that is either pure or
   const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
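
/* For illustration only (not part of the pass): an indirect access
   through a pointer that provably does not alias global memory does
   not affect the state; e.g. in

     int f (int x) { int tmp; int *p = &tmp; *p = x; return *p; }

   the store through P is to local memory, so F can still be const.  */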

/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
	          int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
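
/* For illustration only (not part of the pass): in

     int f (int x) { if (x < 0) abort (); return x; }

   the call to abort () carries ECF_NORETURN | ECF_NOTHROW, so it cannot
   lead to a return; its side effects are ignored and it degrades F only
   to looping pure rather than to neither.  */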

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the better
   of the two variants into STATE and LOOPING.  Be sure to merge looping
   correctly.  IPA_NEITHER functions have looping 0 even if they don't
   have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the worse
   of the two variants into STATE and LOOPING.  FROM and TO are the
   caller and callee of the call being accounted for.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when
     interposed, it may end up being just pure.  We must always assume
     the worst scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure
   or const but a function using them can be.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
			tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	case BUILT_IN_RETURN:
	case BUILT_IN_UNREACHABLE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_EH_POINTER:
	case BUILT_IN_EH_FILTER:
	case BUILT_IN_UNWIND_RESUME:
	case BUILT_IN_CXA_END_CLEANUP:
	case BUILT_IN_EH_COPY_VALUES:
	case BUILT_IN_FRAME_ADDRESS:
	case BUILT_IN_APPLY_ARGS:
	case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
	case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	  *looping = false;
	  *state = IPA_CONST;
	  return true;
	case BUILT_IN_PREFETCH:
	  *looping = true;
	  *state = IPA_CONST;
	  return true;
	default:
	  break;
      }
  return false;
}
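
/* For illustration only (not part of the pass): __builtin_alloca
   allocates stack memory that is released on return, so it does not
   force the caller out of const; e.g.

     int f (int n) { char *p = __builtin_alloca (n); return p != 0; }

   can still be detected as const.  */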

/* Check the parameters of call statement CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or a call that has special
   attributes that may affect its purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
            local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
          local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
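
/* For illustration only (not part of the pass): a call to setjmp makes
   the caller neither const nor pure and possibly looping, since control
   may return through the setjmp point again via longjmp, e.g.

     #include <setjmp.h>
     jmp_buf env;
     int f (void) { return setjmp (env); }  */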

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Examine the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we rather
     inline C++ destructors and keep clobber semantics than eliminate
     them.

     TODO: We may get smarter during early optimizations on these and
     let functions containing only clobbers be optimized more.  This is
     a common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
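
/* For illustration only (not part of the pass): a volatile asm, or one
   that clobbers memory, forces the containing function to IPA_NEITHER
   even though it touches no C-level state, e.g.

     void f (void) { __asm__ __volatile__ ("" ::: "memory"); }  */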

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
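
/* For illustration only (not part of the pass): in

     void *f (__SIZE_TYPE__ n)
     {
       void *p = malloc (n);
       if (p == 0)
	 return 0;
       return p;
     }

   the only uses of P are the comparison against 0 and the return
   statement, so check_retval_uses accepts it.  */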

/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT(element) is a function call.
   3) The return value has immediate uses only within comparisons
      (gcond or gassign) and the return stmt (and likewise a phi arg has
      immediate use only within a comparison or the phi stmt).  */
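
/* For illustration only (not part of the pass), a typical candidate
   whose return value is a phi of NULL and a call result:

     void *f (__SIZE_TYPE__ n)
     {
       return n ? malloc (n) : 0;
     }  */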

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->name()), (reason));  \
  return false;  \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }
  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.")
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }
  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of
	 possible infinite loop side effect).  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that the
	     loop will be proved to be finite in testcases such as
	     loop-15.c and loop-24.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    cannot prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
          loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
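
/* For illustration only (not part of the pass): a loop whose
   finiteness cannot be proved only makes a function "looping", e.g.

     int f (unsigned n)
     {
       while (n != 1)
	 n = (n & 1) ? 3 * n + 1 : n / 2;
       return 0;
     }

   may be marked DECL_LOOPING_CONST_OR_PURE_P rather than plain const.  */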

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the address-of
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function gets cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
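
/* For reference (derived from the code above, not a format change):
   the bit-packed fields streamed per node are

     pure_const_state           2 bits
     state_previously_known     2 bits
     looping_previously_known   1 bit
     looping                    1 bit
     can_throw                  1 bit
     can_free                   1 bit
     malloc_state               2 bits

   and pure_const_read_summary below unpacks them in this sequence.  */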


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}

/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
							        e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
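
/* For illustration only (not part of the pass): direct self recursion
   such as

     int fact (int n) { return n ? n * fact (n - 1) : 1; }

   is caught here; mutually recursive functions instead form a
   non-trivial SCC in the reduced postorder walked below.  */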

/* Return true if N is a cdtor that is not const or pure.  In this case
   we may need to remove an unreachable function if it is marked
   const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* We only propagate across edges with a non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
1407 
1408 
1409 /* Produce transitive closure over the callgraph and compute pure/const
1410    attributes.  */
1411 
1412 static bool
propagate_pure_const(void)1413 propagate_pure_const (void)
1414 {
1415   struct cgraph_node *node;
1416   struct cgraph_node *w;
1417   struct cgraph_node **order =
1418     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1419   int order_pos;
1420   int i;
1421   struct ipa_dfs_info * w_info;
1422   bool remove_p = false;
1423   bool has_cdtor;
1424 
1425   order_pos = ipa_reduced_postorder (order, true,
1426 				     ignore_edge_for_pure_const);
1427   if (dump_file)
1428     {
1429       cgraph_node::dump_cgraph (dump_file);
1430       ipa_print_order (dump_file, "reduced", order, order_pos);
1431     }
1432 
1433   /* Propagate the local information through the call graph to produce
1434      the global information.  All the nodes within a cycle will have
1435      the same info so we collapse cycles first.  Then we can do the
1436      propagation in one pass from the leaves to the roots.  */
1437   for (i = 0; i < order_pos; i++ )
1438     {
1439       enum pure_const_state_e pure_const_state = IPA_CONST;
1440       bool looping = false;
1441       int count = 0;
1442       node = order[i];
1443 
1444       if (node->alias)
1445 	continue;
1446 
1447       if (dump_file && (dump_flags & TDF_DETAILS))
1448 	fprintf (dump_file, "Starting cycle\n");
1449 
1450       /* Find the worst state for any node in the cycle.  */
1451       w = node;
1452       while (w && pure_const_state != IPA_NEITHER)
1453 	{
1454 	  struct cgraph_edge *e;
1455 	  struct cgraph_edge *ie;
1456 	  int i;
1457 	  struct ipa_ref *ref = NULL;
1458 
1459 	  funct_state w_l = funct_state_summaries->get_create (w);
1460 	  if (dump_file && (dump_flags & TDF_DETAILS))
1461 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
1462 		     w->dump_name (),
1463 		     pure_const_names[w_l->pure_const_state],
1464 		     w_l->looping);
1465 
1466 	  /* First merge in function body properties.
1467 	     We are safe to pass NULL as FROM and TO because we will take care
1468 	     of possible interposition when walking callees.  */
1469 	  worse_state (&pure_const_state, &looping,
1470 		       w_l->pure_const_state, w_l->looping,
1471 		       NULL, NULL);
1472 	  if (pure_const_state == IPA_NEITHER)
1473 	    break;
1474 
1475 	  count++;
1476 
1477 	  /* We consider recursive cycles as possibly infinite.
1478 	     This might be relaxed since infinite recursion leads to stack
1479 	     overflow.  */
1480 	  if (count > 1)
1481 	    looping = true;
1482 
1483 	  /* Now walk the edges and merge in callee properties.  */
1484 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1485 	       e = e->next_callee)
1486 	    {
1487 	      enum availability avail;
1488 	      struct cgraph_node *y = e->callee->
1489 				function_or_virtual_thunk_symbol (&avail,
1490 								  e->caller);
1491 	      enum pure_const_state_e edge_state = IPA_CONST;
1492 	      bool edge_looping = false;
1493 
1494 	      if (dump_file && (dump_flags & TDF_DETAILS))
1495 		{
1496 		  fprintf (dump_file, "    Call to %s",
1497 			   e->callee->dump_name ());
1498 		}
1499 	      if (avail > AVAIL_INTERPOSABLE)
1500 		{
1501 		  funct_state y_l = funct_state_summaries->get_create (y);
1502 
1503 		  if (dump_file && (dump_flags & TDF_DETAILS))
1504 		    {
1505 		      fprintf (dump_file,
1506 			       " state:%s looping:%i\n",
1507 			       pure_const_names[y_l->pure_const_state],
1508 			       y_l->looping);
1509 		    }
1510 		  if (y_l->pure_const_state > IPA_PURE
1511 		      && e->cannot_lead_to_return_p ())
1512 		    {
1513 		      if (dump_file && (dump_flags & TDF_DETAILS))
1514 			fprintf (dump_file,
1515 				 "        Ignoring side effects"
1516 				 " -> pure, looping\n");
1517 		      edge_state = IPA_PURE;
1518 		      edge_looping = true;
1519 		    }
1520 		  else
1521 		    {
1522 		      edge_state = y_l->pure_const_state;
1523 		      edge_looping = y_l->looping;
1524 		    }
1525 		}
1526 	      else if (special_builtin_state (&edge_state, &edge_looping,
1527 					       y->decl))
1528 		;
1529 	      else
1530 		state_from_flags (&edge_state, &edge_looping,
1531 				  flags_from_decl_or_type (y->decl),
1532 				  e->cannot_lead_to_return_p ());
1533 
1534 	      /* Merge the results with what we already know.  */
1535 	      better_state (&edge_state, &edge_looping,
1536 			    w_l->state_previously_known,
1537 			    w_l->looping_previously_known);
1538 	      worse_state (&pure_const_state, &looping,
1539 			   edge_state, edge_looping, e->caller, e->callee);
1540 	      if (pure_const_state == IPA_NEITHER)
1541 	        break;
1542 	    }
1543 
1544 	  /* Now process the indirect call.  */
1545           for (ie = w->indirect_calls;
1546 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1547 	    {
1548 	      enum pure_const_state_e edge_state = IPA_CONST;
1549 	      bool edge_looping = false;
1550 
1551 	      if (dump_file && (dump_flags & TDF_DETAILS))
1552 		fprintf (dump_file, "    Indirect call");
1553 	      state_from_flags (&edge_state, &edge_looping,
1554 			        ie->indirect_info->ecf_flags,
1555 				ie->cannot_lead_to_return_p ());
1556 	      /* Merge the results with what we already know.  */
1557 	      better_state (&edge_state, &edge_looping,
1558 			    w_l->state_previously_known,
1559 			    w_l->looping_previously_known);
1560 	      worse_state (&pure_const_state, &looping,
1561 			   edge_state, edge_looping, NULL, NULL);
1562 	      if (pure_const_state == IPA_NEITHER)
1563 	        break;
1564 	    }
1565 
1566 	  /* And finally all loads and stores.  */
1567 	  for (i = 0; w->iterate_reference (i, ref)
1568 	       && pure_const_state != IPA_NEITHER; i++)
1569 	    {
1570 	      enum pure_const_state_e ref_state = IPA_CONST;
1571 	      bool ref_looping = false;
1572 	      switch (ref->use)
1573 		{
1574 		case IPA_REF_LOAD:
1575 		  /* readonly reads are safe.  */
1576 		  if (TREE_READONLY (ref->referred->decl))
1577 		    break;
1578 		  if (dump_file && (dump_flags & TDF_DETAILS))
1579 		    fprintf (dump_file, "    nonreadonly global var read\n");
1580 		  ref_state = IPA_PURE;
1581 		  break;
1582 		case IPA_REF_STORE:
1583 		  if (ref->cannot_lead_to_return ())
1584 		    break;
1585 		  ref_state = IPA_NEITHER;
1586 		  if (dump_file && (dump_flags & TDF_DETAILS))
1587 		    fprintf (dump_file, "    global var write\n");
1588 		  break;
1589 		case IPA_REF_ADDR:
1590 		  break;
1591 		default:
1592 		  gcc_unreachable ();
1593 		}
1594 	      better_state (&ref_state, &ref_looping,
1595 			    w_l->state_previously_known,
1596 			    w_l->looping_previously_known);
1597 	      worse_state (&pure_const_state, &looping,
1598 			   ref_state, ref_looping, NULL, NULL);
1599 	      if (pure_const_state == IPA_NEITHER)
1600 		break;
1601 	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning a constructor or destructor into a non-looping
		   const/pure function enables us to possibly remove the
		   function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */
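
/* An illustrative sketch of the scheme used below (hypothetical names,
   not GCC code): collapse each strongly connected region to a single
   value, then make one leaves-to-roots pass over the reduced graph.
   With scc_of[v] naming v's region and callees[v] listing its calls:

     bool throws[n_sccs];   // seeded from the local analysis
     for (int s = 0; s < n_sccs; s++)	   // leaves to roots
       for (int v : nodes_of_scc[s])
	 for (int callee : callees[v])
	   throws[s] |= throws[scc_of[callee]];

   Because all nodes of a cycle share the region's value, a single pass
   reaches the fixed point.  */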

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When the callee is compiled with non-call exceptions we
		     must also check that the declaration is bound to the
		     current body, as another semantically equivalent body
		     may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
	fprintf (dump_file, "%s: %s\n", node->name (),
		 malloc_state_names[fs->malloc_state]);
    }
}
/* Propagate malloc attribute across the callgraph.  */
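
/* Illustrative sketch only: with the ordering STATE_MALLOC_TOP
   < STATE_MALLOC < STATE_MALLOC_BOTTOM, the join used below is again a
   numeric maximum, so a function keeps STATE_MALLOC only while every
   callee whose value it returns does too.  For example (hypothetical
   code, not part of GCC):

     void *wrap (size_t n) { return xmalloc (n); }	// stays malloc
     static char buf[64];
     void *maybe (size_t n) { return n ? xmalloc (n) : buf; }  // bottom

   The while loop below iterates until no malloc_state changes; this
   terminates because states only ever move down the lattice.  */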

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!funct_state_summaries->exists (node))
	  {
	    funct_state fs = funct_state_summaries->get_create (node);
	    fs->malloc_state = STATE_MALLOC;
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !funct_state_summaries->exists (node))
	    continue;

	  funct_state l = funct_state_summaries->get (node);

	  /* FIXME: add support for indirect calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      /* Check the callee, not the caller; the caller's summary
		 was already verified above.  */
	      if (!funct_state_summaries->exists (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state
		= funct_state_summaries->get_create (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  callees.release ();
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
	funct_state l = funct_state_summaries->get (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  bool remove_p;

  /* Discovering nothrow first lets more calls be recognized as not
     leading to a return, which improves the later analyses.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false) {}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if the function should be skipped for local pure-const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations, we must not promote functions that are
     called by already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable
     and do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
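
/* For illustration (hypothetical functions, not part of GCC): given

     int sq (int x) { return x * x; }	   // const
     int g;
     int getg (void) { return g; }	   // pure: reads global memory
     int bump (void) { return ++g; }	   // neither: writes global memory

   the analysis would mark sq TREE_READONLY and getg DECL_PURE_P, while
   bump is left unannotated.  */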

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
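  /* For instance (illustrative only), a function such as

       void spin (void) { for (;;) do_work (); }

     has no edges into its exit block, so it is discovered to be
     noreturn here even without an explicit attribute;
     TREE_THIS_VOLATILE on a FUNCTION_DECL is how noreturn is
     recorded.  */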
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
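
/* For example (illustrative only), when compiled with -fexceptions a
   leaf function such as

     int add (int a, int b) { return a + b; }

   contains no statement that can throw externally, so it can be marked
   TREE_NOTHROW here, letting callers drop their EH edges early.  */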

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      /* The return value is a set of TODO flags, not a boolean.  */
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
	  {
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}