/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with post inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
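
/* Editorial illustration (not part of the pass itself) of the three
   properties this file computes:

     int add (int a, int b) { return a + b; }   - const: the result depends
                                                  only on the arguments.
     int g;
     int read_g (void) { return g; }            - pure: reads global memory
                                                  but does not write it.
     int bump_g (void) { return g++; }          - neither: writes global
                                                  memory.

   A const call can be CSEd and deleted freely; a pure call can be CSEd
   across statements that do not write memory.  */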

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "print-tree.h"
#include "calls.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};
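
/* Editorial note: the enumerators above are deliberately ordered from
   strongest to weakest property; the merge routines below rely on this
   ordering by taking MIN/MAX of states (see better_state and
   worse_state).  */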

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user declared; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? _("function might be candidate for attribute %<%s%>")
              : _("function might be candidate for attribute %<%s%>"
                  " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

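/* Emit suggestion about __attribute__((noreturn)) for DECL, when the
   language hook and target permit warning about it.  */
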
static void
warn_function_noreturn (tree decl)
{
  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
                           true, warned_about, "noreturn");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);
  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
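
/* Editorial summary of the decisions above: a volatile access, a
   "used"-attribute variable, or any write to static/global memory forces
   IPA_NEITHER; a read of a non-readonly static or global demotes
   IPA_CONST to IPA_PURE; reads of readonly data and accesses to local
   automatics leave the state unchanged.  */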


/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   worse of the two variants into STATE and LOOPING.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2)
{
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
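
/* Worked example (editorial): merging a const, non-looping body with a
   call to a pure, looping callee gives worse_state (IPA_CONST, false,
   IPA_PURE, true) = (IPA_PURE, true); combining that with a user-declared
   "const" via better_state (IPA_PURE, true, IPA_CONST, false) yields
   (IPA_CONST, false), since declared attributes are trusted.  */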

/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}
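
/* Editorial note: the builtins accepted above roughly manipulate only
   control flow, the current frame's stack, or EH bookkeeping, so a
   function using them can still look const or pure to its callers.
   BUILT_IN_PREFETCH is additionally marked looping, which (per the
   comment on the looping flag above) keeps DCE from deleting it.  */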

/* Check the parameters of a function call to CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what interesting
   side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
         indication of a possible infinite loop side
         effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that loops
             will be proved finite in testcases such as in loop-15.c
             and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    can not prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->get_availability () < AVAIL_INTERPOSABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (node->get_availability () > AVAIL_INTERPOSABLE
      && opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    {
      funct_state l = get_function_state (node);
      if (l != &varying_state)
        free (l);
      set_function_state (node, NULL);
    }
}


void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_availability () >= AVAIL_INTERPOSABLE
        && opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        {
          struct bitpack_d bp;
          funct_state fs;
          int node_ref;
          lto_symtab_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
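          /* Layout of the bitpack (editorial): pure_const_state (2 bits),
             state_previously_known (2), looping_previously_known (1),
             looping (1), can_throw (1), can_free (1);
             pure_const_read_summary unpacks in the same order.  */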
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s/%i ",
                           node->name (),
                           node->order);
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n  pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, "  previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file, "  function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file, "  function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file, "  function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file, "  function can locally free\n");
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}

/* Return true if NODE is a self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
  return false;
}

/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
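  /* Editorial example: for a cycle a <-> b where a also calls a pure
     function c, a and b are collapsed into one SCC; the SCC's state is
     the worst over a's body, b's body and the edge to c, and because the
     cycle makes recursion possible, looping is set for every node in it
     (see count > 1 below).  */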
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = get_function_state (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
                     w->name (),
                     w->order,
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping);
          if (pure_const_state == IPA_NEITHER)
            break;

          /* For overwritable nodes we can not assume anything.  */
          if (w->get_availability () == AVAIL_INTERPOSABLE)
            {
              worse_state (&pure_const_state, &looping,
                           w_l->state_previously_known,
                           w_l->looping_previously_known);
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file,
                           "    Overwritable. state %s looping %i\n",
                           pure_const_names[w_l->state_previously_known],
                           w_l->looping_previously_known);
                }
              break;
            }

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file,
                           "    Call to %s/%i",
                           e->callee->name (),
                           e->callee->order);
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 "        Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls; ie; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref); i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    global var write\n");
                  break;
                case IPA_REF_ADDR:
                case IPA_REF_CHKP:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = get_function_state (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              this_looping |= w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->global.inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
                                                            NULL, true);
                w->set_const_flag (true, this_looping);
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
                                                            NULL, true);
                w->set_pure_flag (true, this_looping);
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;
          funct_state w_l = get_function_state (w);

          if (w_l->can_throw
              || w->get_availability () == AVAIL_INTERPOSABLE)
            can_throw = true;

          for (e = w->callees; e && !can_throw; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);

              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);

                  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
                      && e->can_throw_external)
                    can_throw = true;
                }
              else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
                can_throw = true;
            }
          for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
            if (ie->can_throw_external)
              can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's nothrow state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->global.inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}


/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return, which improves
     later analysis.  */
  propagate_nothrow ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
                     pure_const_generate_summary, /* generate_summary */
                     pure_const_write_summary, /* write_summary */
                     pure_const_read_summary, /* read_summary */
                     NULL, /* write_optimization_summary */
                     NULL, /* read_optimization_summary */
                     NULL, /* stmt_fixup */
                     0, /* function_transform_todo_flags_start */
                     NULL, /* function_transform */
                     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
        {
          warn_function_const (current_function_decl, !l->looping);
          if (!skip)
            {
              node->set_const_flag (true, l->looping);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (!skip)
            {
              node->set_const_flag (true, false);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
        {
          if (!skip)
            {
              node->set_pure_flag (true, l->looping);
              changed = true;
            }
          warn_function_pure (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (!skip)
            {
              node->set_pure_flag (true, false);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
          && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
        warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}