1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).
24
25 This must be run after inlining decisions have been made, since
26 otherwise the local sets will not contain information that is
27 consistent with the post-inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59
60 /* Lattice values for const and pure functions. Everything starts out
61 being const, then may drop to pure and then neither depending on
62 what is found. */
63 enum pure_const_state_e
64 {
65 IPA_CONST,
66 IPA_PURE,
67 IPA_NEITHER
68 };
69
70 const char *pure_const_names[3] = {"const", "pure", "neither"};
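/* For illustration only (not part of the pass): a hedged sketch of how
   ordinary source-level functions would land on this lattice, assuming the
   usual treatment of reads and writes of global memory.

       static int counter;

       int sq (int x)  { return x * x; }      // touches no global memory -> IPA_CONST
       int peek (void) { return counter; }    // only reads global memory -> IPA_PURE
       int bump (void) { return ++counter; }  // writes global memory     -> IPA_NEITHER
*/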
71
72 /* Holder for the const_state. There is one of these per function
73 decl. */
74 struct funct_state_d
75 {
76 /* See above. */
77 enum pure_const_state_e pure_const_state;
78 /* What the user set here; we can always be sure about this. */
79 enum pure_const_state_e state_previously_known;
80 bool looping_previously_known;
81
82 /* True if the function could possibly loop forever. There are a
83 lot of ways that this could be determined. We are pretty
84 conservative here. While it is possible to CSE pure and const
85 calls, it is not legal to have DCE get rid of the call if there
86 is a possibility that the call could loop forever, since this is
87 a behavioral change. */
88 bool looping;
89
90 bool can_throw;
91
92 /* If the function can call free, munmap, or otherwise make previously
93 non-trapping memory accesses trapping. */
94 bool can_free;
95 };
96
97 /* State used when we know nothing about a function. */
98 static struct funct_state_d varying_state
99 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
100
101
102 typedef struct funct_state_d * funct_state;
103
104 /* The storage of the funct_state is abstracted because there is the
105 possibility that it may be desirable to move this to the cgraph
106 local info. */
107
108 /* Array, indexed by cgraph node uid, of function states. */
109
110 static vec<funct_state> funct_state_vec;
111
112 static bool gate_pure_const (void);
113
114 namespace {
115
116 const pass_data pass_data_ipa_pure_const =
117 {
118 IPA_PASS, /* type */
119 "pure-const", /* name */
120 OPTGROUP_NONE, /* optinfo_flags */
121 TV_IPA_PURE_CONST, /* tv_id */
122 0, /* properties_required */
123 0, /* properties_provided */
124 0, /* properties_destroyed */
125 0, /* todo_flags_start */
126 0, /* todo_flags_finish */
127 };
128
129 class pass_ipa_pure_const : public ipa_opt_pass_d
130 {
131 public:
132 pass_ipa_pure_const(gcc::context *ctxt);
133
134 /* opt_pass methods: */
135 bool gate (function *) { return gate_pure_const (); }
136 unsigned int execute (function *fun);
137
138 void register_hooks (void);
139
140 private:
141 bool init_p;
142
143 /* Holders of ipa cgraph hooks: */
144 struct cgraph_node_hook_list *function_insertion_hook_holder;
145 struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 struct cgraph_node_hook_list *node_removal_hook_holder;
147
148 }; // class pass_ipa_pure_const
149
150 } // anon namespace
151
152 /* Try to guess whether the function body will always be visible to the
153 compiler when compiling the call and whether the compiler will be able
154 to propagate the information by itself. */
155
156 static bool
157 function_always_visible_to_compiler_p (tree decl)
158 {
159 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
160 }
161
162 /* Emit a suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
163 is true if the function is known to be finite. The diagnostic is
164 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
165 OPTION; this function may initialize it, and it is always returned
166 by the function. */
167
168 static hash_set<tree> *
169 suggest_attribute (int option, tree decl, bool known_finite,
170 hash_set<tree> *warned_about,
171 const char * attrib_name)
172 {
173 if (!option_enabled (option, &global_options))
174 return warned_about;
175 if (TREE_THIS_VOLATILE (decl)
176 || (known_finite && function_always_visible_to_compiler_p (decl)))
177 return warned_about;
178
179 if (!warned_about)
180 warned_about = new hash_set<tree>;
181 if (warned_about->contains (decl))
182 return warned_about;
183 warned_about->add (decl);
184 warning_at (DECL_SOURCE_LOCATION (decl),
185 option,
186 known_finite
187 ? _("function might be candidate for attribute %<%s%>")
188 : _("function might be candidate for attribute %<%s%>"
189 " if it is known to return normally"), attrib_name);
190 return warned_about;
191 }
192
193 /* Emit a suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. */
195
196 static void
197 warn_function_pure (tree decl, bool known_finite)
198 {
199 static hash_set<tree> *warned_about;
200
201 warned_about
202 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
203 known_finite, warned_about, "pure");
204 }
205
206 /* Emit a suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
207 is true if the function is known to be finite. */
208
209 static void
210 warn_function_const (tree decl, bool known_finite)
211 {
212 static hash_set<tree> *warned_about;
213 warned_about
214 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
215 known_finite, warned_about, "const");
216 }
217
218 static void
219 warn_function_noreturn (tree decl)
220 {
221 tree original_decl = decl;
222
223 cgraph_node *node = cgraph_node::get (decl);
224 if (node->instrumentation_clone)
225 decl = node->instrumented_version->decl;
226
227 static hash_set<tree> *warned_about;
228 if (!lang_hooks.missing_noreturn_ok_p (decl)
229 && targetm.warn_func_return (decl))
230 warned_about
231 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
232 true, warned_about, "noreturn");
233 }
234
235 /* Return true if we have a function state for NODE. */
236
237 static inline bool
238 has_function_state (struct cgraph_node *node)
239 {
240 if (!funct_state_vec.exists ()
241 || funct_state_vec.length () <= (unsigned int)node->uid)
242 return false;
243 return funct_state_vec[node->uid] != NULL;
244 }
245
246 /* Return the function state from NODE. */
247
248 static inline funct_state
249 get_function_state (struct cgraph_node *node)
250 {
251 if (!funct_state_vec.exists ()
252 || funct_state_vec.length () <= (unsigned int)node->uid
253 || !funct_state_vec[node->uid])
254 /* We might want to put correct previously_known state into varying. */
255 return &varying_state;
256 return funct_state_vec[node->uid];
257 }
258
259 /* Set the function state S for NODE. */
260
261 static inline void
262 set_function_state (struct cgraph_node *node, funct_state s)
263 {
264 if (!funct_state_vec.exists ()
265 || funct_state_vec.length () <= (unsigned int)node->uid)
266 funct_state_vec.safe_grow_cleared (node->uid + 1);
267 funct_state_vec[node->uid] = s;
268 }
269
270 /* Check to see if the use (or definition when CHECKING_WRITE is true)
271 of variable T is legal in a function that is either pure or const. */
272
273 static inline void
274 check_decl (funct_state local,
275 tree t, bool checking_write, bool ipa)
276 {
277 /* We do not want to do anything with a volatile, except mark any
278 function that uses one as neither const nor pure. */
279 if (TREE_THIS_VOLATILE (t))
280 {
281 local->pure_const_state = IPA_NEITHER;
282 if (dump_file)
283 fprintf (dump_file, " Volatile operand is not const/pure\n");
284 return;
285 }
286
287 /* Do not care about a local automatic that is not static. */
288 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
289 return;
290
291 /* If the variable has the "used" attribute, treat it as if it had
292 been touched by the devil. */
293 if (DECL_PRESERVE_P (t))
294 {
295 local->pure_const_state = IPA_NEITHER;
296 if (dump_file)
297 fprintf (dump_file, " Used static/global variable is not const/pure\n");
298 return;
299 }
300
301 /* In IPA mode we are not interested in checking actual loads and stores;
302 they will be processed at propagation time using ipa_ref. */
303 if (ipa)
304 return;
305
306 /* Since we have dealt with the locals and params cases above, if we
307 are CHECKING_WRITE, this cannot be a pure or constant
308 function. */
309 if (checking_write)
310 {
311 local->pure_const_state = IPA_NEITHER;
312 if (dump_file)
313 fprintf (dump_file, " static/global memory write is not const/pure\n");
314 return;
315 }
316
317 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
318 {
319 /* Readonly reads are safe. */
320 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
321 return; /* Read of a constant, do not change the function state. */
322 else
323 {
324 if (dump_file)
325 fprintf (dump_file, " global memory read is not const\n");
326 /* Just a regular read. */
327 if (local->pure_const_state == IPA_CONST)
328 local->pure_const_state = IPA_PURE;
329 }
330 }
331 else
332 {
333 /* Compilation level statics can be read if they are readonly
334 variables. */
335 if (TREE_READONLY (t))
336 return;
337
338 if (dump_file)
339 fprintf (dump_file, " static memory read is not const\n");
340 /* Just a regular read. */
341 if (local->pure_const_state == IPA_CONST)
342 local->pure_const_state = IPA_PURE;
343 }
344 }
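/* Summary sketch of the checks above (restating them, not adding new rules):
   in local (non-IPA) mode a reference to declaration T degrades the lattice
   roughly as follows; in IPA mode loads and stores are left to propagation
   time instead.

       volatile T                              -> IPA_NEITHER
       non-static local automatic              -> unchanged
       T has the "used" attribute              -> IPA_NEITHER
       write to static/global memory           -> IPA_NEITHER
       read of a readonly static/global        -> unchanged
       any other static/global memory read     -> at most IPA_PURE  */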
345
346
347 /* Check to see if the use (or definition when CHECKING_WRITE is true)
348 of variable T is legal in a function that is either pure or const. */
349
350 static inline void
351 check_op (funct_state local, tree t, bool checking_write)
352 {
353 t = get_base_address (t);
354 if (t && TREE_THIS_VOLATILE (t))
355 {
356 local->pure_const_state = IPA_NEITHER;
357 if (dump_file)
358 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
359 return;
360 }
361 else if (t
362 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
363 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
364 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
365 {
366 if (dump_file)
367 fprintf (dump_file, " Indirect ref to local memory is OK\n");
368 return;
369 }
370 else if (checking_write)
371 {
372 local->pure_const_state = IPA_NEITHER;
373 if (dump_file)
374 fprintf (dump_file, " Indirect ref write is not const/pure\n");
375 return;
376 }
377 else
378 {
379 if (dump_file)
380 fprintf (dump_file, " Indirect ref read is not const\n");
381 if (local->pure_const_state == IPA_CONST)
382 local->pure_const_state = IPA_PURE;
383 }
384 }
385
386 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
387
388 static void
389 state_from_flags (enum pure_const_state_e *state, bool *looping,
390 int flags, bool cannot_lead_to_return)
391 {
392 *looping = false;
393 if (flags & ECF_LOOPING_CONST_OR_PURE)
394 {
395 *looping = true;
396 if (dump_file && (dump_flags & TDF_DETAILS))
397 fprintf (dump_file, " looping");
398 }
399 if (flags & ECF_CONST)
400 {
401 *state = IPA_CONST;
402 if (dump_file && (dump_flags & TDF_DETAILS))
403 fprintf (dump_file, " const\n");
404 }
405 else if (flags & ECF_PURE)
406 {
407 *state = IPA_PURE;
408 if (dump_file && (dump_flags & TDF_DETAILS))
409 fprintf (dump_file, " pure\n");
410 }
411 else if (cannot_lead_to_return)
412 {
413 *state = IPA_PURE;
414 *looping = true;
415 if (dump_file && (dump_flags & TDF_DETAILS))
416 fprintf (dump_file, " ignoring side effects->pure looping\n");
417 }
418 else
419 {
420 if (dump_file && (dump_flags & TDF_DETAILS))
421 fprintf (dump_file, " neither\n");
422 *state = IPA_NEITHER;
423 *looping = true;
424 }
425 }
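/* A minimal usage sketch, assuming a flag word as returned by
   flags_from_decl_or_type; the local variable names are made up for the
   example.

       enum pure_const_state_e st;
       bool looping;
       state_from_flags (&st, &looping,
                         ECF_PURE | ECF_LOOPING_CONST_OR_PURE, false);
       // st == IPA_PURE, looping == true

   With neither ECF_CONST nor ECF_PURE set the result is IPA_NEITHER with
   looping set, unless CANNOT_LEAD_TO_RETURN is true, in which case side
   effects are ignored and the state becomes looping IPA_PURE.  */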
426
427 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
428 the better of the two variants into STATE and LOOPING.
429 Be sure to merge looping correctly. IPA_NEITHER functions
430 have looping 0 even if they do not have to return. */
431
432 static inline void
433 better_state (enum pure_const_state_e *state, bool *looping,
434 enum pure_const_state_e state2, bool looping2)
435 {
436 if (state2 < *state)
437 {
438 if (*state == IPA_NEITHER)
439 *looping = looping2;
440 else
441 *looping = MIN (*looping, looping2);
442 *state = state2;
443 }
444 else if (state2 != IPA_NEITHER)
445 *looping = MIN (*looping, looping2);
446 }
447
448 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
449 the worse of the two variants into STATE and LOOPING. */
450
451 static inline void
452 worse_state (enum pure_const_state_e *state, bool *looping,
453 enum pure_const_state_e state2, bool looping2)
454 {
455 *state = MAX (*state, state2);
456 *looping = MAX (*looping, looping2);
457 }
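/* Merge semantics sketch (it follows directly from the MIN/MAX above and
   the ordering IPA_CONST < IPA_PURE < IPA_NEITHER):

       worse_state:  (IPA_CONST, looping 0) with (IPA_PURE, looping 1)
                       -> (IPA_PURE, looping 1)
       better_state: (IPA_NEITHER, looping 0) with (IPA_PURE, looping 1)
                       -> (IPA_PURE, looping 1)

   better_state takes the looping flag of the incoming state when improving
   from IPA_NEITHER, because IPA_NEITHER entries keep looping at 0 even for
   functions that need not return.  */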
458
459 /* Recognize special cases of builtins that are by themselves not pure or const
460 but a function using them is. */
461 static bool
462 special_builtin_state (enum pure_const_state_e *state, bool *looping,
463 tree callee)
464 {
465 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
466 switch (DECL_FUNCTION_CODE (callee))
467 {
468 case BUILT_IN_RETURN:
469 case BUILT_IN_UNREACHABLE:
470 case BUILT_IN_ALLOCA:
471 case BUILT_IN_ALLOCA_WITH_ALIGN:
472 case BUILT_IN_STACK_SAVE:
473 case BUILT_IN_STACK_RESTORE:
474 case BUILT_IN_EH_POINTER:
475 case BUILT_IN_EH_FILTER:
476 case BUILT_IN_UNWIND_RESUME:
477 case BUILT_IN_CXA_END_CLEANUP:
478 case BUILT_IN_EH_COPY_VALUES:
479 case BUILT_IN_FRAME_ADDRESS:
480 case BUILT_IN_APPLY:
481 case BUILT_IN_APPLY_ARGS:
482 *looping = false;
483 *state = IPA_CONST;
484 return true;
485 case BUILT_IN_PREFETCH:
486 *looping = true;
487 *state = IPA_CONST;
488 return true;
489 default:
490 break;
491 }
492 return false;
493 }
494
495 /* Check the parameters of the function call CALL to see if
496 there are any references in the parameters that are not allowed for
497 pure or const functions. Also check to see if this is either an
498 indirect call, a call outside the compilation unit, or has special
499 attributes that may also affect the purity. CALL is the call
500 statement for the entire call expression. */
501
502 static void
503 check_call (funct_state local, gcall *call, bool ipa)
504 {
505 int flags = gimple_call_flags (call);
506 tree callee_t = gimple_call_fndecl (call);
507 bool possibly_throws = stmt_could_throw_p (call);
508 bool possibly_throws_externally = (possibly_throws
509 && stmt_can_throw_external (call));
510
511 if (possibly_throws)
512 {
513 unsigned int i;
514 for (i = 0; i < gimple_num_ops (call); i++)
515 if (gimple_op (call, i)
516 && tree_could_throw_p (gimple_op (call, i)))
517 {
518 if (possibly_throws && cfun->can_throw_non_call_exceptions)
519 {
520 if (dump_file)
521 fprintf (dump_file, " operand can throw; looping\n");
522 local->looping = true;
523 }
524 if (possibly_throws_externally)
525 {
526 if (dump_file)
527 fprintf (dump_file, " operand can throw externally\n");
528 local->can_throw = true;
529 }
530 }
531 }
532
533 /* The const and pure flags are set by a variety of places in the
534 compiler (including here). If someone has already set the flags
535 for the callee (such as for some of the builtins), we will use
536 them, otherwise we will compute our own information.
537
538 Const and pure functions have less clobber effects than other
539 functions so we process these first. Otherwise if it is a call
540 outside the compilation unit or an indirect call we punt. This
541 leaves local calls which will be processed by following the call
542 graph. */
543 if (callee_t)
544 {
545 enum pure_const_state_e call_state;
546 bool call_looping;
547
548 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
549 && !nonfreeing_call_p (call))
550 local->can_free = true;
551
552 if (special_builtin_state (&call_state, &call_looping, callee_t))
553 {
554 worse_state (&local->pure_const_state, &local->looping,
555 call_state, call_looping);
556 return;
557 }
558 /* When bad things happen to bad functions, they cannot be const
559 or pure. */
560 if (setjmp_call_p (callee_t))
561 {
562 if (dump_file)
563 fprintf (dump_file, " setjmp is not const/pure\n");
564 local->looping = true;
565 local->pure_const_state = IPA_NEITHER;
566 }
567
568 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
569 switch (DECL_FUNCTION_CODE (callee_t))
570 {
571 case BUILT_IN_LONGJMP:
572 case BUILT_IN_NONLOCAL_GOTO:
573 if (dump_file)
574 fprintf (dump_file, " longjmp and nonlocal goto are not const/pure\n");
575 local->pure_const_state = IPA_NEITHER;
576 local->looping = true;
577 break;
578 default:
579 break;
580 }
581 }
582 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
583 local->can_free = true;
584
585 /* When not in IPA mode, we can still handle self recursion. */
586 if (!ipa && callee_t
587 && recursive_call_p (current_function_decl, callee_t))
588 {
589 if (dump_file)
590 fprintf (dump_file, " Recursive call can loop.\n");
591 local->looping = true;
592 }
593 /* Either the callee is unknown or we are doing local analysis.
594 Look to see if there are any bits available for the callee (such as by
595 declaration or because it is a builtin) and process solely on the basis
596 of those bits. */
597 else if (!ipa)
598 {
599 enum pure_const_state_e call_state;
600 bool call_looping;
601 if (possibly_throws && cfun->can_throw_non_call_exceptions)
602 {
603 if (dump_file)
604 fprintf (dump_file, " can throw; looping\n");
605 local->looping = true;
606 }
607 if (possibly_throws_externally)
608 {
609 if (dump_file)
610 {
611 fprintf (dump_file, " can throw externally to lp %i\n",
612 lookup_stmt_eh_lp (call));
613 if (callee_t)
614 fprintf (dump_file, " callee:%s\n",
615 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
616 }
617 local->can_throw = true;
618 }
619 if (dump_file && (dump_flags & TDF_DETAILS))
620 fprintf (dump_file, " checking flags for call:");
621 state_from_flags (&call_state, &call_looping, flags,
622 ((flags & (ECF_NORETURN | ECF_NOTHROW))
623 == (ECF_NORETURN | ECF_NOTHROW))
624 || (!flag_exceptions && (flags & ECF_NORETURN)));
625 worse_state (&local->pure_const_state, &local->looping,
626 call_state, call_looping);
627 }
628 /* Direct function calls are handled by IPA propagation. */
629 }
630
631 /* Wrapper around check_decl for loads in local mode. */
632
633 static bool
634 check_load (gimple *, tree op, tree, void *data)
635 {
636 if (DECL_P (op))
637 check_decl ((funct_state)data, op, false, false);
638 else
639 check_op ((funct_state)data, op, false);
640 return false;
641 }
642
643 /* Wrapper around check_decl for stores in local mode. */
644
645 static bool
646 check_store (gimple *, tree op, tree, void *data)
647 {
648 if (DECL_P (op))
649 check_decl ((funct_state)data, op, true, false);
650 else
651 check_op ((funct_state)data, op, true);
652 return false;
653 }
654
655 /* Wrapper around check_decl for loads in ipa mode. */
656
657 static bool
658 check_ipa_load (gimple *, tree op, tree, void *data)
659 {
660 if (DECL_P (op))
661 check_decl ((funct_state)data, op, false, true);
662 else
663 check_op ((funct_state)data, op, false);
664 return false;
665 }
666
667 /* Wrapper around check_decl for stores in ipa mode. */
668
669 static bool
670 check_ipa_store (gimple *, tree op, tree, void *data)
671 {
672 if (DECL_P (op))
673 check_decl ((funct_state)data, op, true, true);
674 else
675 check_op ((funct_state)data, op, true);
676 return false;
677 }
678
679 /* Look at the statement pointed to by GSIP and figure out what interesting
680 side effects it has. */
681 static void
682 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
683 {
684 gimple *stmt = gsi_stmt (*gsip);
685
686 if (is_gimple_debug (stmt))
687 return;
688
689 /* Do consider clobbers as side effects before IPA, so that we rather inline
690 C++ destructors and keep clobber semantics than eliminate them.
691 
692 TODO: We may get smarter during early optimizations on these and let
693 functions containing only clobbers be optimized more. This is a common
694 case of C++ destructors. */
695
696 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
697 return;
698
699 if (dump_file)
700 {
701 fprintf (dump_file, " scanning: ");
702 print_gimple_stmt (dump_file, stmt, 0, 0);
703 }
704
705 if (gimple_has_volatile_ops (stmt)
706 && !gimple_clobber_p (stmt))
707 {
708 local->pure_const_state = IPA_NEITHER;
709 if (dump_file)
710 fprintf (dump_file, " Volatile stmt is not const/pure\n");
711 }
712
713 /* Look for loads and stores. */
714 walk_stmt_load_store_ops (stmt, local,
715 ipa ? check_ipa_load : check_load,
716 ipa ? check_ipa_store : check_store);
717
718 if (gimple_code (stmt) != GIMPLE_CALL
719 && stmt_could_throw_p (stmt))
720 {
721 if (cfun->can_throw_non_call_exceptions)
722 {
723 if (dump_file)
724 fprintf (dump_file, " can throw; looping\n");
725 local->looping = true;
726 }
727 if (stmt_can_throw_external (stmt))
728 {
729 if (dump_file)
730 fprintf (dump_file, " can throw externally\n");
731 local->can_throw = true;
732 }
733 else
734 if (dump_file)
735 fprintf (dump_file, " can throw\n");
736 }
737 switch (gimple_code (stmt))
738 {
739 case GIMPLE_CALL:
740 check_call (local, as_a <gcall *> (stmt), ipa);
741 break;
742 case GIMPLE_LABEL:
743 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
744 /* Target of long jump. */
745 {
746 if (dump_file)
747 fprintf (dump_file, " nonlocal label is not const/pure\n");
748 local->pure_const_state = IPA_NEITHER;
749 }
750 break;
751 case GIMPLE_ASM:
752 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
753 {
754 if (dump_file)
755 fprintf (dump_file, " memory asm clobber is not const/pure\n");
756 /* Abandon all hope, ye who enter here. */
757 local->pure_const_state = IPA_NEITHER;
758 local->can_free = true;
759 }
760 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
761 {
762 if (dump_file)
763 fprintf (dump_file, " volatile is not const/pure\n");
764 /* Abandon all hope, ye who enter here. */
765 local->pure_const_state = IPA_NEITHER;
766 local->looping = true;
767 local->can_free = true;
768 }
769 return;
770 default:
771 break;
772 }
773 }
774
775
776 /* This is the main routine for finding the reference patterns for
777 global variables within a function FN. */
778
779 static funct_state
780 analyze_function (struct cgraph_node *fn, bool ipa)
781 {
782 tree decl = fn->decl;
783 funct_state l;
784 basic_block this_block;
785
786 l = XCNEW (struct funct_state_d);
787 l->pure_const_state = IPA_CONST;
788 l->state_previously_known = IPA_NEITHER;
789 l->looping_previously_known = true;
790 l->looping = false;
791 l->can_throw = false;
792 l->can_free = false;
793 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
794 flags_from_decl_or_type (fn->decl),
795 fn->cannot_return_p ());
796
797 if (fn->thunk.thunk_p || fn->alias)
798 {
799 /* Thunk gets propagated through, so nothing interesting happens. */
800 gcc_assert (ipa);
801 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
802 l->pure_const_state = IPA_NEITHER;
803 return l;
804 }
805
806 if (dump_file)
807 {
808 fprintf (dump_file, "\n\n local analysis of %s\n ",
809 fn->name ());
810 }
811
812 push_cfun (DECL_STRUCT_FUNCTION (decl));
813
814 FOR_EACH_BB_FN (this_block, cfun)
815 {
816 gimple_stmt_iterator gsi;
817 struct walk_stmt_info wi;
818
819 memset (&wi, 0, sizeof (wi));
820 for (gsi = gsi_start_bb (this_block);
821 !gsi_end_p (gsi);
822 gsi_next (&gsi))
823 {
824 check_stmt (&gsi, l, ipa);
825 if (l->pure_const_state == IPA_NEITHER
826 && l->looping
827 && l->can_throw
828 && l->can_free)
829 goto end;
830 }
831 }
832
833 end:
834 if (l->pure_const_state != IPA_NEITHER)
835 {
836 /* Const functions cannot have back edges (an
837 indication of a possible infinite loop side
838 effect). */
839 if (mark_dfs_back_edges ())
840 {
841 /* Preheaders are needed for SCEV to work.
842 Simple latches and recorded exits improve the chances that a loop
843 will be proved finite in testcases such as loop-15.c
844 and loop-24.c. */
845 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
846 | LOOPS_HAVE_SIMPLE_LATCHES
847 | LOOPS_HAVE_RECORDED_EXITS);
848 if (dump_file && (dump_flags & TDF_DETAILS))
849 flow_loops_dump (dump_file, NULL, 0);
850 if (mark_irreducible_loops ())
851 {
852 if (dump_file)
853 fprintf (dump_file, " has irreducible loops\n");
854 l->looping = true;
855 }
856 else
857 {
858 struct loop *loop;
859 scev_initialize ();
860 FOR_EACH_LOOP (loop, 0)
861 if (!finite_loop_p (loop))
862 {
863 if (dump_file)
864 fprintf (dump_file, " can not prove finiteness of "
865 "loop %i\n", loop->num);
866 l->looping = true;
867 break;
868 }
869 scev_finalize ();
870 }
871 loop_optimizer_finalize ();
872 }
873 }
874
875 if (dump_file && (dump_flags & TDF_DETAILS))
876 fprintf (dump_file, " checking previously known:");
877
878 better_state (&l->pure_const_state, &l->looping,
879 l->state_previously_known,
880 l->looping_previously_known);
881 if (TREE_NOTHROW (decl))
882 l->can_throw = false;
883
884 pop_cfun ();
885 if (dump_file)
886 {
887 if (l->looping)
888 fprintf (dump_file, "Function is locally looping.\n");
889 if (l->can_throw)
890 fprintf (dump_file, "Function is locally throwing.\n");
891 if (l->pure_const_state == IPA_CONST)
892 fprintf (dump_file, "Function is locally const.\n");
893 if (l->pure_const_state == IPA_PURE)
894 fprintf (dump_file, "Function is locally pure.\n");
895 if (l->can_free)
896 fprintf (dump_file, "Function can locally free.\n");
897 }
898 return l;
899 }
900
901 /* Called when a new function is inserted into the callgraph late. */
902 static void
903 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
904 {
905 if (node->get_availability () < AVAIL_INTERPOSABLE)
906 return;
907 /* There are some shared nodes, in particular the initializers on
908 static declarations. We do not need to scan them more than once
909 since all we would be interested in are the address-of
910 operations. */
911 if (node->get_availability () > AVAIL_INTERPOSABLE
912 && opt_for_fn (node->decl, flag_ipa_pure_const))
913 set_function_state (node, analyze_function (node, true));
914 }
915
916 /* Called when a new clone is inserted into the callgraph late. */
917
918 static void
919 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
920 void *data ATTRIBUTE_UNUSED)
921 {
922 if (has_function_state (src))
923 {
924 funct_state l = XNEW (struct funct_state_d);
925 gcc_assert (!has_function_state (dst));
926 memcpy (l, get_function_state (src), sizeof (*l));
927 set_function_state (dst, l);
928 }
929 }
930
931 /* Called when a cgraph node is removed late. */
932
933 static void
934 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
935 {
936 if (has_function_state (node))
937 {
938 funct_state l = get_function_state (node);
939 if (l != &varying_state)
940 free (l);
941 set_function_state (node, NULL);
942 }
943 }
944
945
946 void
947 pass_ipa_pure_const::
948 register_hooks (void)
949 {
950 if (init_p)
951 return;
952
953 init_p = true;
954
955 node_removal_hook_holder =
956 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
957 node_duplication_hook_holder =
958 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
959 function_insertion_hook_holder =
960 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
961 }
962
963
964 /* Analyze each function in the cgraph to see if it is locally PURE or
965 CONST. */
966
967 static void
968 pure_const_generate_summary (void)
969 {
970 struct cgraph_node *node;
971
972 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
973 pass->register_hooks ();
974
975 /* Process all of the functions.
976
977 We process AVAIL_INTERPOSABLE functions. We cannot use the results
978 by default, but the info can be used at LTO with -fwhole-program or
979 when the function got cloned and the clone is AVAILABLE. */
980
981 FOR_EACH_DEFINED_FUNCTION (node)
982 if (node->get_availability () >= AVAIL_INTERPOSABLE
983 && opt_for_fn (node->decl, flag_ipa_pure_const))
984 set_function_state (node, analyze_function (node, true));
985 }
986
987
988 /* Serialize the ipa info for lto. */
989
990 static void
991 pure_const_write_summary (void)
992 {
993 struct cgraph_node *node;
994 struct lto_simple_output_block *ob
995 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
996 unsigned int count = 0;
997 lto_symtab_encoder_iterator lsei;
998 lto_symtab_encoder_t encoder;
999
1000 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1001
1002 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1003 lsei_next_function_in_partition (&lsei))
1004 {
1005 node = lsei_cgraph_node (lsei);
1006 if (node->definition && has_function_state (node))
1007 count++;
1008 }
1009
1010 streamer_write_uhwi_stream (ob->main_stream, count);
1011
1012 /* Process all of the functions. */
1013 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1014 lsei_next_function_in_partition (&lsei))
1015 {
1016 node = lsei_cgraph_node (lsei);
1017 if (node->definition && has_function_state (node))
1018 {
1019 struct bitpack_d bp;
1020 funct_state fs;
1021 int node_ref;
1022 lto_symtab_encoder_t encoder;
1023
1024 fs = get_function_state (node);
1025
1026 encoder = ob->decl_state->symtab_node_encoder;
1027 node_ref = lto_symtab_encoder_encode (encoder, node);
1028 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1029
1030 /* Note that flags will need to be read in the opposite
1031 order as we are pushing the bitflags into FLAGS. */
1032 bp = bitpack_create (ob->main_stream);
1033 bp_pack_value (&bp, fs->pure_const_state, 2);
1034 bp_pack_value (&bp, fs->state_previously_known, 2);
1035 bp_pack_value (&bp, fs->looping_previously_known, 1);
1036 bp_pack_value (&bp, fs->looping, 1);
1037 bp_pack_value (&bp, fs->can_throw, 1);
1038 bp_pack_value (&bp, fs->can_free, 1);
1039 streamer_write_bitpack (&bp);
1040 }
1041 }
1042
1043 lto_destroy_simple_output_block (ob);
1044 }
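/* Informal sketch of the section layout written above (not a normative
   format description): a uhwi record count, then for each function with
   local state

       uhwi     node_ref (symtab encoder reference)
       bitpack  pure_const_state (2 bits), state_previously_known (2 bits),
                looping_previously_known, looping, can_throw, can_free
                (1 bit each)

   pure_const_read_summary below consumes records of the same shape.  */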
1045
1046
1047 /* Deserialize the ipa info for lto. */
1048
1049 static void
1050 pure_const_read_summary (void)
1051 {
1052 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1053 struct lto_file_decl_data *file_data;
1054 unsigned int j = 0;
1055
1056 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1057 pass->register_hooks ();
1058
1059 while ((file_data = file_data_vec[j++]))
1060 {
1061 const char *data;
1062 size_t len;
1063 struct lto_input_block *ib
1064 = lto_create_simple_input_block (file_data,
1065 LTO_section_ipa_pure_const,
1066 &data, &len);
1067 if (ib)
1068 {
1069 unsigned int i;
1070 unsigned int count = streamer_read_uhwi (ib);
1071
1072 for (i = 0; i < count; i++)
1073 {
1074 unsigned int index;
1075 struct cgraph_node *node;
1076 struct bitpack_d bp;
1077 funct_state fs;
1078 lto_symtab_encoder_t encoder;
1079
1080 fs = XCNEW (struct funct_state_d);
1081 index = streamer_read_uhwi (ib);
1082 encoder = file_data->symtab_node_encoder;
1083 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1084 index));
1085 set_function_state (node, fs);
1086
1087 /* Note that the flags must be read in the opposite
1088 order in which they were written (the bitflags were
1089 pushed into FLAGS). */
1090 bp = streamer_read_bitpack (ib);
1091 fs->pure_const_state
1092 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1093 fs->state_previously_known
1094 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1095 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1096 fs->looping = bp_unpack_value (&bp, 1);
1097 fs->can_throw = bp_unpack_value (&bp, 1);
1098 fs->can_free = bp_unpack_value (&bp, 1);
1099 if (dump_file)
1100 {
1101 int flags = flags_from_decl_or_type (node->decl);
1102 fprintf (dump_file, "Read info for %s/%i ",
1103 node->name (),
1104 node->order);
1105 if (flags & ECF_CONST)
1106 fprintf (dump_file, " const");
1107 if (flags & ECF_PURE)
1108 fprintf (dump_file, " pure");
1109 if (flags & ECF_NOTHROW)
1110 fprintf (dump_file, " nothrow");
1111 fprintf (dump_file, "\n pure const state: %s\n",
1112 pure_const_names[fs->pure_const_state]);
1113 fprintf (dump_file, " previously known state: %s\n",
1114 pure_const_names[fs->state_previously_known]);
1115 if (fs->looping)
1116 fprintf (dump_file," function is locally looping\n");
1117 if (fs->looping_previously_known)
1118 fprintf (dump_file," function is previously known looping\n");
1119 if (fs->can_throw)
1120 fprintf (dump_file," function is locally throwing\n");
1121 if (fs->can_free)
1122 fprintf (dump_file," function can locally free\n");
1123 }
1124 }
1125
1126 lto_destroy_simple_input_block (file_data,
1127 LTO_section_ipa_pure_const,
1128 ib, data, len);
1129 }
1130 }
1131 }
1132
1133 /* We only propagate across edges that can throw externally and whose callee
1134 is not interposable. */
1135
1136 static bool
1137 ignore_edge_for_nothrow (struct cgraph_edge *e)
1138 {
1139 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1140 return true;
1141
1142 enum availability avail;
1143 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail);
1144 return (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl));
1145 }
1146
1147 /* Return true if NODE is a self-recursive function.
1148 Indirectly recursive functions appear as non-trivial strongly
1149 connected components, so we need to care about self recursion
1150 only. */
1151
1152 static bool
1153 self_recursive_p (struct cgraph_node *node)
1154 {
1155 struct cgraph_edge *e;
1156 for (e = node->callees; e; e = e->next_callee)
1157 if (e->callee->function_symbol () == node)
1158 return true;
1159 return false;
1160 }
1161
1162 /* Return true if N is a cdtor that is not const or pure. In this case we may
1163 need to remove the unreachable function if it is marked const/pure. */
1164
1165 static bool
1166 cdtor_p (cgraph_node *n, void *)
1167 {
1168 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1169 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1170 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1171 return false;
1172 }
1173
1174 /* We only propagate across edges with a non-interposable callee. */
1175
1176 static bool
1177 ignore_edge_for_pure_const (struct cgraph_edge *e)
1178 {
1179 enum availability avail;
1180 e->callee->function_or_virtual_thunk_symbol (&avail);
1181 return (avail <= AVAIL_INTERPOSABLE);
1182 }
1183
1184
1185 /* Produce transitive closure over the callgraph and compute pure/const
1186 attributes. */
1187
1188 static bool
1189 propagate_pure_const (void)
1190 {
1191 struct cgraph_node *node;
1192 struct cgraph_node *w;
1193 struct cgraph_node **order =
1194 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1195 int order_pos;
1196 int i;
1197 struct ipa_dfs_info * w_info;
1198 bool remove_p = false;
1199
1200 order_pos = ipa_reduced_postorder (order, true, false,
1201 ignore_edge_for_pure_const);
1202 if (dump_file)
1203 {
1204 cgraph_node::dump_cgraph (dump_file);
1205 ipa_print_order (dump_file, "reduced", order, order_pos);
1206 }
1207
1208 /* Propagate the local information through the call graph to produce
1209 the global information. All the nodes within a cycle will have
1210 the same info so we collapse cycles first. Then we can do the
1211 propagation in one pass from the leaves to the roots. */
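/* Worked example (illustrative only): if A and B call each other and both
   call C, then {A, B} is a single reduced-postorder entry processed after C.
   C's already-computed state is merged in with worse_state, the cycle is
   marked looping because it is recursive, and the resulting state is copied
   back to both A and B at the end of the iteration.  */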
1212 for (i = 0; i < order_pos; i++ )
1213 {
1214 enum pure_const_state_e pure_const_state = IPA_CONST;
1215 bool looping = false;
1216 int count = 0;
1217 node = order[i];
1218
1219 if (node->alias)
1220 continue;
1221
1222 if (dump_file && (dump_flags & TDF_DETAILS))
1223 fprintf (dump_file, "Starting cycle\n");
1224
1225 /* Find the worst state for any node in the cycle. */
1226 w = node;
1227 while (w && pure_const_state != IPA_NEITHER)
1228 {
1229 struct cgraph_edge *e;
1230 struct cgraph_edge *ie;
1231 int i;
1232 struct ipa_ref *ref = NULL;
1233
1234 funct_state w_l = get_function_state (w);
1235 if (dump_file && (dump_flags & TDF_DETAILS))
1236 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1237 w->name (),
1238 w->order,
1239 pure_const_names[w_l->pure_const_state],
1240 w_l->looping);
1241
1242 /* First merge in function body properties. */
1243 worse_state (&pure_const_state, &looping,
1244 w_l->pure_const_state, w_l->looping);
1245 if (pure_const_state == IPA_NEITHER)
1246 break;
1247
1248 /* For interposable nodes we can not assume anything. */
1249 if (w->get_availability () == AVAIL_INTERPOSABLE)
1250 {
1251 worse_state (&pure_const_state, &looping,
1252 w_l->state_previously_known,
1253 w_l->looping_previously_known);
1254 if (dump_file && (dump_flags & TDF_DETAILS))
1255 {
1256 fprintf (dump_file,
1257 " Interposable. state %s looping %i\n",
1258 pure_const_names[w_l->state_previously_known],
1259 w_l->looping_previously_known);
1260 }
1261 break;
1262 }
1263
1264 count++;
1265
1266 /* We consider recursive cycles as possibly infinite.
1267 This might be relaxed since infinite recursion leads to stack
1268 overflow. */
1269 if (count > 1)
1270 looping = true;
1271
1272 /* Now walk the edges and merge in callee properties. */
1273 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1274 e = e->next_callee)
1275 {
1276 enum availability avail;
1277 struct cgraph_node *y = e->callee->
1278 function_or_virtual_thunk_symbol (&avail);
1279 enum pure_const_state_e edge_state = IPA_CONST;
1280 bool edge_looping = false;
1281
1282 if (dump_file && (dump_flags & TDF_DETAILS))
1283 {
1284 fprintf (dump_file,
1285 " Call to %s/%i",
1286 e->callee->name (),
1287 e->callee->order);
1288 }
1289 if (avail > AVAIL_INTERPOSABLE)
1290 {
1291 funct_state y_l = get_function_state (y);
1292 if (dump_file && (dump_flags & TDF_DETAILS))
1293 {
1294 fprintf (dump_file,
1295 " state:%s looping:%i\n",
1296 pure_const_names[y_l->pure_const_state],
1297 y_l->looping);
1298 }
1299 if (y_l->pure_const_state > IPA_PURE
1300 && e->cannot_lead_to_return_p ())
1301 {
1302 if (dump_file && (dump_flags & TDF_DETAILS))
1303 fprintf (dump_file,
1304 " Ignoring side effects"
1305 " -> pure, looping\n");
1306 edge_state = IPA_PURE;
1307 edge_looping = true;
1308 }
1309 else
1310 {
1311 edge_state = y_l->pure_const_state;
1312 edge_looping = y_l->looping;
1313 }
1314 }
1315 else if (special_builtin_state (&edge_state, &edge_looping,
1316 y->decl))
1317 ;
1318 else
1319 state_from_flags (&edge_state, &edge_looping,
1320 flags_from_decl_or_type (y->decl),
1321 e->cannot_lead_to_return_p ());
1322
1323 /* Merge the results with what we already know. */
1324 better_state (&edge_state, &edge_looping,
1325 w_l->state_previously_known,
1326 w_l->looping_previously_known);
1327 worse_state (&pure_const_state, &looping,
1328 edge_state, edge_looping);
1329 if (pure_const_state == IPA_NEITHER)
1330 break;
1331 }
1332
1333 /* Now process the indirect call. */
1334 for (ie = w->indirect_calls;
1335 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1336 {
1337 enum pure_const_state_e edge_state = IPA_CONST;
1338 bool edge_looping = false;
1339
1340 if (dump_file && (dump_flags & TDF_DETAILS))
1341 fprintf (dump_file, " Indirect call");
1342 state_from_flags (&edge_state, &edge_looping,
1343 ie->indirect_info->ecf_flags,
1344 ie->cannot_lead_to_return_p ());
1345 /* Merge the results with what we already know. */
1346 better_state (&edge_state, &edge_looping,
1347 w_l->state_previously_known,
1348 w_l->looping_previously_known);
1349 worse_state (&pure_const_state, &looping,
1350 edge_state, edge_looping);
1351 if (pure_const_state == IPA_NEITHER)
1352 break;
1353 }
1354
1355 /* And finally all loads and stores. */
1356 for (i = 0; w->iterate_reference (i, ref)
1357 && pure_const_state != IPA_NEITHER; i++)
1358 {
1359 enum pure_const_state_e ref_state = IPA_CONST;
1360 bool ref_looping = false;
1361 switch (ref->use)
1362 {
1363 case IPA_REF_LOAD:
1364 /* readonly reads are safe. */
1365 if (TREE_READONLY (ref->referred->decl))
1366 break;
1367 if (dump_file && (dump_flags & TDF_DETAILS))
1368 fprintf (dump_file, " nonreadonly global var read\n");
1369 ref_state = IPA_PURE;
1370 break;
1371 case IPA_REF_STORE:
1372 if (ref->cannot_lead_to_return ())
1373 break;
1374 ref_state = IPA_NEITHER;
1375 if (dump_file && (dump_flags & TDF_DETAILS))
1376 fprintf (dump_file, " global var write\n");
1377 break;
1378 case IPA_REF_ADDR:
1379 case IPA_REF_CHKP:
1380 break;
1381 default:
1382 gcc_unreachable ();
1383 }
1384 better_state (&ref_state, &ref_looping,
1385 w_l->state_previously_known,
1386 w_l->looping_previously_known);
1387 worse_state (&pure_const_state, &looping,
1388 ref_state, ref_looping);
1389 if (pure_const_state == IPA_NEITHER)
1390 break;
1391 }
1392 w_info = (struct ipa_dfs_info *) w->aux;
1393 w = w_info->next_cycle;
1394 }
1395 if (dump_file && (dump_flags & TDF_DETAILS))
1396 fprintf (dump_file, "Result %s looping %i\n",
1397 pure_const_names [pure_const_state],
1398 looping);
1399
1400 /* Find the worst state of can_free for any node in the cycle. */
1401 bool can_free = false;
1402 w = node;
1403 while (w && !can_free)
1404 {
1405 struct cgraph_edge *e;
1406 funct_state w_l = get_function_state (w);
1407
1408 if (w_l->can_free
1409 || w->get_availability () == AVAIL_INTERPOSABLE
1410 || w->indirect_calls)
1411 can_free = true;
1412
1413 for (e = w->callees; e && !can_free; e = e->next_callee)
1414 {
1415 enum availability avail;
1416 struct cgraph_node *y = e->callee->
1417 function_or_virtual_thunk_symbol (&avail);
1418
1419 if (avail > AVAIL_INTERPOSABLE)
1420 can_free = get_function_state (y)->can_free;
1421 else
1422 can_free = true;
1423 }
1424 w_info = (struct ipa_dfs_info *) w->aux;
1425 w = w_info->next_cycle;
1426 }
1427
1428 /* Copy back the region's pure_const_state which is shared by
1429 all nodes in the region. */
1430 w = node;
1431 while (w)
1432 {
1433 funct_state w_l = get_function_state (w);
1434 enum pure_const_state_e this_state = pure_const_state;
1435 bool this_looping = looping;
1436
1437 w_l->can_free = can_free;
1438 w->nonfreeing_fn = !can_free;
1439 if (!can_free && dump_file)
1440 fprintf (dump_file, "Function found not to call free: %s\n",
1441 w->name ());
1442
1443 if (w_l->state_previously_known != IPA_NEITHER
1444 && this_state > w_l->state_previously_known)
1445 {
1446 this_state = w_l->state_previously_known;
1447 if (this_state == IPA_NEITHER)
1448 this_looping = w_l->looping_previously_known;
1449 }
1450 if (!this_looping && self_recursive_p (w))
1451 this_looping = true;
1452 if (!w_l->looping_previously_known)
1453 this_looping = false;
1454
1455 /* All nodes within a cycle share the same info. */
1456 w_l->pure_const_state = this_state;
1457 w_l->looping = this_looping;
1458
1459 /* Inline clones share declaration with their offline copies;
1460 do not modify their declarations since the offline copy may
1461 be different. */
1462 if (!w->global.inlined_to)
1463 switch (this_state)
1464 {
1465 case IPA_CONST:
1466 if (!TREE_READONLY (w->decl))
1467 {
1468 warn_function_const (w->decl, !this_looping);
1469 if (dump_file)
1470 fprintf (dump_file, "Function found to be %sconst: %s\n",
1471 this_looping ? "looping " : "",
1472 w->name ());
1473 }
1474 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1475 NULL, true);
1476 w->set_const_flag (true, this_looping);
1477 break;
1478
1479 case IPA_PURE:
1480 if (!DECL_PURE_P (w->decl))
1481 {
1482 warn_function_pure (w->decl, !this_looping);
1483 if (dump_file)
1484 fprintf (dump_file, "Function found to be %spure: %s\n",
1485 this_looping ? "looping " : "",
1486 w->name ());
1487 }
1488 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1489 NULL, true);
1490 w->set_pure_flag (true, this_looping);
1491 break;
1492
1493 default:
1494 break;
1495 }
1496 w_info = (struct ipa_dfs_info *) w->aux;
1497 w = w_info->next_cycle;
1498 }
1499 }
1500
1501 ipa_free_postorder_info ();
1502 free (order);
1503 return remove_p;
1504 }
1505
1506 /* Produce transitive closure over the callgraph and compute nothrow
1507 attributes. */
1508
1509 static void
1510 propagate_nothrow (void)
1511 {
1512 struct cgraph_node *node;
1513 struct cgraph_node *w;
1514 struct cgraph_node **order =
1515 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1516 int order_pos;
1517 int i;
1518 struct ipa_dfs_info * w_info;
1519
1520 order_pos = ipa_reduced_postorder (order, true, false,
1521 ignore_edge_for_nothrow);
1522 if (dump_file)
1523 {
1524 cgraph_node::dump_cgraph (dump_file);
1525 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1526 }
1527
1528 /* Propagate the local information through the call graph to produce
1529 the global information. All the nodes within a cycle will have
1530 the same info so we collapse cycles first. Then we can do the
1531 propagation in one pass from the leaves to the roots. */
1532 for (i = 0; i < order_pos; i++ )
1533 {
1534 bool can_throw = false;
1535 node = order[i];
1536
1537 if (node->alias)
1538 continue;
1539
1540 /* Find the worst state for any node in the cycle. */
1541 w = node;
1542 while (w && !can_throw)
1543 {
1544 struct cgraph_edge *e, *ie;
1545
1546 if (!TREE_NOTHROW (w->decl))
1547 {
1548 funct_state w_l = get_function_state (w);
1549
1550 if (w_l->can_throw
1551 || w->get_availability () == AVAIL_INTERPOSABLE)
1552 can_throw = true;
1553
1554 for (e = w->callees; e && !can_throw; e = e->next_callee)
1555 {
1556 enum availability avail;
1557
1558 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1559 continue;
1560
1561 struct cgraph_node *y = e->callee->
1562 function_or_virtual_thunk_symbol (&avail);
1563
1564 /* We can use info about the callee only if we know it can
1565 not be interposed. */
1566 if (avail <= AVAIL_INTERPOSABLE
1567 || (!TREE_NOTHROW (y->decl)
1568 && get_function_state (y)->can_throw))
1569 can_throw = true;
1570 }
1571 for (ie = w->indirect_calls; ie && !can_throw;
1572 ie = ie->next_callee)
1573 if (ie->can_throw_external
1574 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1575 can_throw = true;
1576 }
1577 w_info = (struct ipa_dfs_info *) w->aux;
1578 w = w_info->next_cycle;
1579 }
1580
1581 /* Copy back the region's nothrow state, which is shared by
1582 all nodes in the region. */
1583 w = node;
1584 while (w)
1585 {
1586 funct_state w_l = get_function_state (w);
1587 if (!can_throw && !TREE_NOTHROW (w->decl))
1588 {
1589 /* Inline clones share declaration with their offline copies;
1590 do not modify their declarations since the offline copy may
1591 be different. */
1592 if (!w->global.inlined_to)
1593 {
1594 w->set_nothrow_flag (true);
1595 if (dump_file)
1596 fprintf (dump_file, "Function found to be nothrow: %s\n",
1597 w->name ());
1598 }
1599 }
1600 else if (can_throw && !TREE_NOTHROW (w->decl))
1601 w_l->can_throw = true;
1602 w_info = (struct ipa_dfs_info *) w->aux;
1603 w = w_info->next_cycle;
1604 }
1605 }
1606
1607 ipa_free_postorder_info ();
1608 free (order);
1609 }
1610
1611
1612 /* Produce the global information by performing a transitive closure
1613 on the local information that was produced by generate_summary. */
1614
1615 unsigned int
1616 pass_ipa_pure_const::
1617 execute (function *)
1618 {
1619 struct cgraph_node *node;
1620 bool remove_p;
1621
1622 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1623 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1624 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1625
1626 /* Nothrow makes more functions not lead to a return, which improves
1627 later analysis. */
1628 propagate_nothrow ();
1629 remove_p = propagate_pure_const ();
1630
1631 /* Cleanup. */
1632 FOR_EACH_FUNCTION (node)
1633 if (has_function_state (node))
1634 free (get_function_state (node));
1635 funct_state_vec.release ();
1636 return remove_p ? TODO_remove_functions : 0;
1637 }
1638
1639 static bool
1640 gate_pure_const (void)
1641 {
1642 return flag_ipa_pure_const || in_lto_p;
1643 }
1644
1645 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
1646 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
1647 pure_const_generate_summary, /* generate_summary */
1648 pure_const_write_summary, /* write_summary */
1649 pure_const_read_summary, /* read_summary */
1650 NULL, /* write_optimization_summary */
1651 NULL, /* read_optimization_summary */
1652 NULL, /* stmt_fixup */
1653 0, /* function_transform_todo_flags_start */
1654 NULL, /* function_transform */
1655 NULL), /* variable_transform */
1656 init_p(false),
1657 function_insertion_hook_holder(NULL),
1658 node_duplication_hook_holder(NULL),
1659 node_removal_hook_holder(NULL)
1660 {
1661 }
1662
1663 ipa_opt_pass_d *
1664 make_pass_ipa_pure_const (gcc::context *ctxt)
1665 {
1666 return new pass_ipa_pure_const (ctxt);
1667 }
1668
1669 /* Return true if function should be skipped for local pure const analysis. */
1670
1671 static bool
1672 skip_function_for_local_pure_const (struct cgraph_node *node)
1673 {
1674 /* Because we do not schedule pass_fixup_cfg over the whole program after early optimizations,
1675 we must not promote functions that are called by already processed functions. */
1676
1677 if (function_called_by_processed_nodes_p ())
1678 {
1679 if (dump_file)
1680 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1681 return true;
1682 }
1683 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1684 {
1685 if (dump_file)
1686 fprintf (dump_file, "Function is not available or interposable; not analyzing.\n");
1687 return true;
1688 }
1689 return false;
1690 }
1691
1692 /* Simple local pass for pure const discovery reusing the analysis from
1693 ipa_pure_const. This pass is effective when executed together with
1694 other optimization passes in early optimization pass queue. */
1695
1696 namespace {
1697
1698 const pass_data pass_data_local_pure_const =
1699 {
1700 GIMPLE_PASS, /* type */
1701 "local-pure-const", /* name */
1702 OPTGROUP_NONE, /* optinfo_flags */
1703 TV_IPA_PURE_CONST, /* tv_id */
1704 0, /* properties_required */
1705 0, /* properties_provided */
1706 0, /* properties_destroyed */
1707 0, /* todo_flags_start */
1708 0, /* todo_flags_finish */
1709 };
1710
1711 class pass_local_pure_const : public gimple_opt_pass
1712 {
1713 public:
1714 pass_local_pure_const (gcc::context *ctxt)
1715 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
1716 {}
1717
1718 /* opt_pass methods: */
1719 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
1720 virtual bool gate (function *) { return gate_pure_const (); }
1721 virtual unsigned int execute (function *);
1722
1723 }; // class pass_local_pure_const
1724
1725 unsigned int
1726 pass_local_pure_const::execute (function *fun)
1727 {
1728 bool changed = false;
1729 funct_state l;
1730 bool skip;
1731 struct cgraph_node *node;
1732
1733 node = cgraph_node::get (current_function_decl);
1734 skip = skip_function_for_local_pure_const (node);
1735 if (!warn_suggest_attribute_const
1736 && !warn_suggest_attribute_pure
1737 && skip)
1738 return 0;
1739
1740 l = analyze_function (node, false);
1741
1742 /* Do NORETURN discovery. */
1743 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1744 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1745 {
1746 warn_function_noreturn (fun->decl);
1747 if (dump_file)
1748 fprintf (dump_file, "Function found to be noreturn: %s\n",
1749 current_function_name ());
1750
1751 /* Update declaration and reduce profile to executed once. */
1752 TREE_THIS_VOLATILE (current_function_decl) = 1;
1753 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1754 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1755
1756 changed = true;
1757 }
1758
1759 switch (l->pure_const_state)
1760 {
1761 case IPA_CONST:
1762 if (!TREE_READONLY (current_function_decl))
1763 {
1764 warn_function_const (current_function_decl, !l->looping);
1765 if (!skip)
1766 {
1767 node->set_const_flag (true, l->looping);
1768 changed = true;
1769 }
1770 if (dump_file)
1771 fprintf (dump_file, "Function found to be %sconst: %s\n",
1772 l->looping ? "looping " : "",
1773 current_function_name ());
1774 }
1775 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1776 && !l->looping)
1777 {
1778 if (!skip)
1779 {
1780 node->set_const_flag (true, false);
1781 changed = true;
1782 }
1783 if (dump_file)
1784 fprintf (dump_file, "Function found to be non-looping: %s\n",
1785 current_function_name ());
1786 }
1787 break;
1788
1789 case IPA_PURE:
1790 if (!DECL_PURE_P (current_function_decl))
1791 {
1792 if (!skip)
1793 {
1794 node->set_pure_flag (true, l->looping);
1795 changed = true;
1796 }
1797 warn_function_pure (current_function_decl, !l->looping);
1798 if (dump_file)
1799 fprintf (dump_file, "Function found to be %spure: %s\n",
1800 l->looping ? "looping " : "",
1801 current_function_name ());
1802 }
1803 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1804 && !l->looping)
1805 {
1806 if (!skip)
1807 {
1808 node->set_pure_flag (true, false);
1809 changed = true;
1810 }
1811 if (dump_file)
1812 fprintf (dump_file, "Function found to be non-looping: %s\n",
1813 current_function_name ());
1814 }
1815 break;
1816
1817 default:
1818 break;
1819 }
1820 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1821 {
1822 node->set_nothrow_flag (true);
1823 changed = true;
1824 if (dump_file)
1825 fprintf (dump_file, "Function found to be nothrow: %s\n",
1826 current_function_name ());
1827 }
1828 free (l);
1829 if (changed)
1830 return execute_fixup_cfg ();
1831 else
1832 return 0;
1833 }
1834
1835 } // anon namespace
1836
1837 gimple_opt_pass *
1838 make_pass_local_pure_const (gcc::context *ctxt)
1839 {
1840 return new pass_local_pure_const (ctxt);
1841 }
1842
1843 /* Emit noreturn warnings. */
1844
1845 namespace {
1846
1847 const pass_data pass_data_warn_function_noreturn =
1848 {
1849 GIMPLE_PASS, /* type */
1850 "*warn_function_noreturn", /* name */
1851 OPTGROUP_NONE, /* optinfo_flags */
1852 TV_NONE, /* tv_id */
1853 PROP_cfg, /* properties_required */
1854 0, /* properties_provided */
1855 0, /* properties_destroyed */
1856 0, /* todo_flags_start */
1857 0, /* todo_flags_finish */
1858 };
1859
1860 class pass_warn_function_noreturn : public gimple_opt_pass
1861 {
1862 public:
1863 pass_warn_function_noreturn (gcc::context *ctxt)
1864 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
1865 {}
1866
1867 /* opt_pass methods: */
1868 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
1869 virtual unsigned int execute (function *fun)
1870 {
1871 if (!TREE_THIS_VOLATILE (current_function_decl)
1872 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1873 warn_function_noreturn (current_function_decl);
1874 return 0;
1875 }
1876
1877 }; // class pass_warn_function_noreturn
1878
1879 } // anon namespace
1880
1881 gimple_opt_pass *
1882 make_pass_warn_function_noreturn (gcc::context *ctxt)
1883 {
1884 return new pass_warn_function_noreturn (ctxt);
1885 }
1886
1887 /* Simple local pass for nothrow discovery. It runs during lowering and
1888 marks a function nothrow when no statement in its body can throw
1889 externally (ignoring self-recursive calls). */
1890
1891 namespace {
1892
1893 const pass_data pass_data_nothrow =
1894 {
1895 GIMPLE_PASS, /* type */
1896 "nothrow", /* name */
1897 OPTGROUP_NONE, /* optinfo_flags */
1898 TV_IPA_PURE_CONST, /* tv_id */
1899 0, /* properties_required */
1900 0, /* properties_provided */
1901 0, /* properties_destroyed */
1902 0, /* todo_flags_start */
1903 0, /* todo_flags_finish */
1904 };
1905
1906 class pass_nothrow : public gimple_opt_pass
1907 {
1908 public:
1909 pass_nothrow (gcc::context *ctxt)
1910 : gimple_opt_pass (pass_data_nothrow, ctxt)
1911 {}
1912
1913 /* opt_pass methods: */
1914 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
1915 virtual bool gate (function *) { return optimize; }
1916 virtual unsigned int execute (function *);
1917
1918 }; // class pass_nothrow
1919
1920 unsigned int
1921 pass_nothrow::execute (function *)
1922 {
1923 struct cgraph_node *node;
1924 basic_block this_block;
1925
1926 if (TREE_NOTHROW (current_function_decl))
1927 return 0;
1928
1929 node = cgraph_node::get (current_function_decl);
1930
1931 /* We run during lowering; we cannot really use availability yet. */
1932 if (cgraph_node::get (current_function_decl)->get_availability ()
1933 <= AVAIL_INTERPOSABLE)
1934 {
1935 if (dump_file)
1936 fprintf (dump_file, "Function is interposable;"
1937 " not analyzing.\n");
1938 return 0;
1939 }
1940
1941 FOR_EACH_BB_FN (this_block, cfun)
1942 {
1943 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1944 !gsi_end_p (gsi);
1945 gsi_next (&gsi))
1946 if (stmt_can_throw_external (gsi_stmt (gsi)))
1947 {
1948 if (is_gimple_call (gsi_stmt (gsi)))
1949 {
1950 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1951 if (callee_t && recursive_call_p (current_function_decl,
1952 callee_t))
1953 continue;
1954 }
1955
1956 if (dump_file)
1957 {
1958 fprintf (dump_file, "Statement can throw: ");
1959 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1960 }
1961 return 0;
1962 }
1963 }
1964
1965 node->set_nothrow_flag (true);
1966
1967 bool cfg_changed = false;
1968 if (self_recursive_p (node))
1969 FOR_EACH_BB_FN (this_block, cfun)
1970 if (gimple *g = last_stmt (this_block))
1971 if (is_gimple_call (g))
1972 {
1973 tree callee_t = gimple_call_fndecl (g);
1974 if (callee_t
1975 && recursive_call_p (current_function_decl, callee_t)
1976 && maybe_clean_eh_stmt (g)
1977 && gimple_purge_dead_eh_edges (this_block))
1978 cfg_changed = true;
1979 }
1980
1981 if (dump_file)
1982 fprintf (dump_file, "Function found to be nothrow: %s\n",
1983 current_function_name ());
1984 return cfg_changed ? TODO_cleanup_cfg : 0;
1985 }
1986
1987 } // anon namespace
1988
1989 gimple_opt_pass *
1990 make_pass_nothrow (gcc::context *ctxt)
1991 {
1992 return new pass_nothrow (ctxt);
1993 }
1994