/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
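
/* Illustrative example: the flags set here correspond to what a user
   could declare by hand with function attributes:

     int f (int) __attribute__ ((const));   - sets TREE_READONLY
     int g (int) __attribute__ ((pure));    - sets DECL_PURE_P  */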

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
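
/* Illustrative example of the three lattice values:

     int sq (int x) { return x * x; }        - IPA_CONST
     int load (const int *p) { return *p; }  - IPA_PURE (reads memory)
     int bump (int *p) { return ++*p; }      - IPA_NEITHER (writes memory)  */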

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* True if the function can call free, munmap or otherwise make
     previously non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef class funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}

/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique to
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

/* Emit a suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}

/* Emit a suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

/* Emit a suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}


/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local or readonly "
		 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
	          int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   worse of the two variants into STATE and LOOPING.
   FROM and TO are the caller and callee of the call being accounted
   for; they are used to check whether the callee binds to its
   current definition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We always must assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure
   or const but the function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}

/* Check the parameters of the call statement CALL to see if there are
   any references in the parameters that are not allowed for pure or
   const functions.  Also check to see if this is either an indirect
   call, a call outside the compilation unit, or a call with special
   attributes that may also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them, otherwise we will compute our own information.

     Const and pure functions have fewer clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
        {
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we rather
     inline C++ destructors and keep clobber semantics than eliminate
     them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return stmt.  */
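
/* For example (illustrative only), these uses of RETVAL would be
   accepted:

     return retval;          - the return statement STMT itself
     if (retval != 0) ...    - a gcond comparing against 0
     ok = retval == 0;       - a gassign comparison against 0  */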

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  return false;
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}

/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or its
      SSA_NAME_DEF_STMT is a function call.
   3) The return value has immediate uses only within comparisons (gcond
      or gassign) and the return stmt (and likewise a phi arg has immediate
      use only within a comparison or the phi stmt).  */
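
/* For example (illustrative only, assuming my_alloc and fallback_alloc
   are hypothetical functions declared with attribute malloc), a function
   of this shape is a candidate:

     void *wrap_malloc (unsigned n)
     {
       void *p = my_alloc (n);      - def stmt of p is a call
       if (p == 0)                  - use only in comparison against 0
	 p = fallback_alloc (n);
       return p;                    - returned value is a phi of the calls
     }  */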

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason));  \
  return false;  \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }
  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }
  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunks get propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of a possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
        {
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loops
	     will be proved to be finite in testcases such as in loop-15.c
	     and loop-24.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
	        fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      class loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    cannot prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
  else
    /* Do not keep stale summaries.  */
    funct_state_summaries->remove (node);
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
  if (dst_data->malloc_state == STATE_MALLOC
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
    dst_data->malloc_state = STATE_MALLOC_BOTTOM;
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}

/* We only propagate nothrow across edges that can throw externally and
   whose callee is not interposable; return true if edge E should be
   ignored.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
    return true;
  return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	   && !e->callee->binds_to_current_def_p (e->caller))
	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we only need to care about self
   recursion here.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* Skip edges from and to nodes without ipa_pure_const enabled.
   Ignore unavailable symbols.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);

  return (avail <= AVAIL_INTERPOSABLE
	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
	  || !opt_for_fn (ultimate_target->decl,
			  flag_ipa_pure_const));
}
1423 
1424 /* Produce transitive closure over the callgraph and compute pure/const
1425    attributes.  */
1426 
1427 static bool
propagate_pure_const(void)1428 propagate_pure_const (void)
1429 {
1430   struct cgraph_node *node;
1431   struct cgraph_node *w;
1432   struct cgraph_node **order =
1433     XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1434   int order_pos;
1435   int i;
1436   struct ipa_dfs_info * w_info;
1437   bool remove_p = false;
1438   bool has_cdtor;
1439 
1440   order_pos = ipa_reduced_postorder (order, true,
1441 				     ignore_edge_for_pure_const);
1442   if (dump_file)
1443     {
1444       cgraph_node::dump_cgraph (dump_file);
1445       ipa_print_order (dump_file, "reduced", order, order_pos);
1446     }
1447 
1448   /* Propagate the local information through the call graph to produce
1449      the global information.  All the nodes within a cycle will have
1450      the same info so we collapse cycles first.  Then we can do the
1451      propagation in one pass from the leaves to the roots.  */
1452   for (i = 0; i < order_pos; i++ )
1453     {
1454       enum pure_const_state_e pure_const_state = IPA_CONST;
1455       bool looping = false;
1456       int count = 0;
1457       node = order[i];
1458 
1459       if (node->alias)
1460 	continue;
1461 
1462       if (dump_file && (dump_flags & TDF_DETAILS))
1463 	fprintf (dump_file, "Starting cycle\n");
1464 
1465       /* Find the worst state for any node in the cycle.  */
1466       w = node;
1467       while (w && pure_const_state != IPA_NEITHER)
1468 	{
1469 	  struct cgraph_edge *e;
1470 	  struct cgraph_edge *ie;
1471 	  int i;
1472 	  struct ipa_ref *ref = NULL;
1473 
1474 	  funct_state w_l = funct_state_summaries->get_create (w);
1475 	  if (dump_file && (dump_flags & TDF_DETAILS))
1476 	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
1477 		     w->dump_name (),
1478 		     pure_const_names[w_l->pure_const_state],
1479 		     w_l->looping);
1480 
1481 	  /* First merge in function body properties.
1482 	     We are safe to pass NULL as FROM and TO because we will take care
1483 	     of possible interposition when walking callees.  */
1484 	  worse_state (&pure_const_state, &looping,
1485 		       w_l->pure_const_state, w_l->looping,
1486 		       NULL, NULL);
1487 	  if (pure_const_state == IPA_NEITHER)
1488 	    break;
1489 
1490 	  count++;
1491 
1492 	  /* We consider recursive cycles as possibly infinite.
1493 	     This might be relaxed since infinite recursion leads to stack
1494 	     overflow.  */
1495 	  if (count > 1)
1496 	    looping = true;
1497 
1498 	  /* Now walk the edges and merge in callee properties.  */
1499 	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1500 	       e = e->next_callee)
1501 	    {
1502 	      enum availability avail;
1503 	      struct cgraph_node *y = e->callee->
1504 				function_or_virtual_thunk_symbol (&avail,
1505 								  e->caller);
1506 	      enum pure_const_state_e edge_state = IPA_CONST;
1507 	      bool edge_looping = false;
1508 
1509 	      if (dump_file && (dump_flags & TDF_DETAILS))
1510 		{
1511 		  fprintf (dump_file, "    Call to %s",
1512 			   e->callee->dump_name ());
1513 		}
1514 	      if (avail > AVAIL_INTERPOSABLE)
1515 		{
1516 		  funct_state y_l = funct_state_summaries->get_create (y);
1517 
1518 		  if (dump_file && (dump_flags & TDF_DETAILS))
1519 		    {
1520 		      fprintf (dump_file,
1521 			       " state:%s looping:%i\n",
1522 			       pure_const_names[y_l->pure_const_state],
1523 			       y_l->looping);
1524 		    }
1525 		  if (y_l->pure_const_state > IPA_PURE
1526 		      && e->cannot_lead_to_return_p ())
1527 		    {
1528 		      if (dump_file && (dump_flags & TDF_DETAILS))
1529 			fprintf (dump_file,
1530 				 "        Ignoring side effects"
1531 				 " -> pure, looping\n");
1532 		      edge_state = IPA_PURE;
1533 		      edge_looping = true;
1534 		    }
1535 		  else
1536 		    {
1537 		      edge_state = y_l->pure_const_state;
1538 		      edge_looping = y_l->looping;
1539 		    }
1540 		}
1541 	      else if (special_builtin_state (&edge_state, &edge_looping,
1542 					      y->decl))
1543 		;
1544 	      else
1545 		state_from_flags (&edge_state, &edge_looping,
1546 				  flags_from_decl_or_type (y->decl),
1547 				  e->cannot_lead_to_return_p ());
1548 
1549 	      /* Merge the results with what we already know.  */
1550 	      better_state (&edge_state, &edge_looping,
1551 			    w_l->state_previously_known,
1552 			    w_l->looping_previously_known);
1553 	      worse_state (&pure_const_state, &looping,
1554 			   edge_state, edge_looping, e->caller, e->callee);
1555 	      if (pure_const_state == IPA_NEITHER)
1556 	        break;
1557 	    }
1558 
1559 	  /* Now process the indirect call.  */
1560           for (ie = w->indirect_calls;
1561 	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1562 	    {
1563 	      enum pure_const_state_e edge_state = IPA_CONST;
1564 	      bool edge_looping = false;
1565 
1566 	      if (dump_file && (dump_flags & TDF_DETAILS))
1567 		fprintf (dump_file, "    Indirect call");
1568 	      state_from_flags (&edge_state, &edge_looping,
1569 			        ie->indirect_info->ecf_flags,
1570 				ie->cannot_lead_to_return_p ());
1571 	      /* Merge the results with what we already know.  */
1572 	      better_state (&edge_state, &edge_looping,
1573 			    w_l->state_previously_known,
1574 			    w_l->looping_previously_known);
1575 	      worse_state (&pure_const_state, &looping,
1576 			   edge_state, edge_looping, NULL, NULL);
1577 	      if (pure_const_state == IPA_NEITHER)
1578 	        break;
1579 	    }
1580 
1581 	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* Readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->dump_name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		  }
		/* Turning a constructor or destructor into a non-looping
		   const/pure function enables us to possibly remove it
		   completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
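  /* For illustration, consider a hypothetical mutually recursive pair
     (not part of the pass):

       void f (int n) { if (n) g (n - 1); }
       void g (int n) { if (n) f (n - 1); }

     Neither body can throw, so the cycle is collapsed and f and g are
     marked nothrow together; one throwing statement in either body would
     keep can_throw set for the whole cycle.  */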
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.  When the callee is compiled with
		     non-call exceptions, we must also check that the
		     declaration is bound to the current body, as another
		     semantically equivalent body may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
	fprintf (dump_file, "%s: %s\n", node->dump_name (),
		 malloc_state_names[fs->malloc_state]);
    }
}

/* Propagate malloc attribute across the callgraph.  */
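
/* For illustration, a hypothetical wrapper (not part of the pass):

     void *xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   On every path the returned value is the uncaptured return value of a
   malloc-like callee, so xmalloc itself can be marked malloc once the
   lattice below reaches a fixed point.  */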

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!funct_state_summaries->exists (node))
	  {
	    funct_state fs = funct_state_summaries->get_create (node);
	    fs->malloc_state = STATE_MALLOC;
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !funct_state_summaries->exists (node))
	    continue;

	  funct_state l = funct_state_summaries->get (node);

	  /* FIXME: add support for indirect-calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  auto_vec<cgraph_node *, 16> callees;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!funct_state_summaries->exists (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state
		= funct_state_summaries->get_create (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
	funct_state l = funct_state_summaries->get (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->inlined_to
	    && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->dump_name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::execute (function *)
{
  bool remove_p;

  /* Knowing which functions are nothrow lets more calls be proven
     unable to lead to a return, which improves the later analysis.  */
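  /* For illustration (hypothetical input):

       extern void fatal (const char *) __attribute__ ((noreturn));
       int f (int x) { if (x < 0) fatal ("bad"); return x * x; }

     Once fatal is known nothrow as well as noreturn, the call edge
     cannot lead to a return even with exceptions enabled, so its side
     effects at worst degrade f to looping pure instead of neither.  */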
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false) {}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
        fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
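
/* For illustration, a hypothetical input (not part of the pass):

     static int sq (int x) { return x * x; }

   Compiled at -O, the local pass detects sq as const during early
   optimization, so later early passes can already treat repeated calls
   such as sq (a) + sq (a) as redundant.  */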

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */
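
/* For illustration, a hypothetical function (not part of the pass):

     int die (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   has no edge into the exit block, so with -Wsuggest-attribute=noreturn
   the pass below suggests adding the noreturn attribute to it.  */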

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery.  Like the local pure const
   pass above, it is effective when executed together with other
   optimization passes in the early optimization pass queue.  */
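
/* For illustration, hypothetical inputs (not part of the pass):

     void g (void);			// may throw
     void f (void) { g (); }		// blocked: the call can throw
     int h (int x) { return x + 1; }	// no throwing statements: nothrow

   Only h is marked nothrow.  Recursive calls are ignored during the scan,
   since once a function is nothrow its self-calls cannot throw either;
   their stale EH edges are purged afterwards.  */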

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
	         " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
        if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
	  {
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}