/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
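
/* Illustrative example (editor's sketch, not used by this pass): a
   "const" function depends only on its arguments,

       int sq (int x) { return x * x; }

   while a "pure" function may in addition read, but never write,
   global memory:

       extern int counter;
       int get_counter (void) { return counter; }

   The looping variants additionally admit a possible infinite loop,
   so such calls may still be CSEd but must not be removed as dead.  */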

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "ggc.h"
#include "ipa-utils.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"

static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user declared; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};

/* State used when we know nothing about a function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;

/* Try to guess if the function body will always be visible to the
   compiler when compiling the call, and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}

/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a pointer_set unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static struct pointer_set_t *
suggest_attribute (int option, tree decl, bool known_finite,
		   struct pointer_set_t *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = pointer_set_create ();
  if (pointer_set_contains (warned_about, decl))
    return warned_about;
  pointer_set_insert (warned_about, decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? _("function might be candidate for attribute %<%s%>")
	      : _("function might be candidate for attribute %<%s%>"
		  " if it is known to return normally"), attrib_name);
  return warned_about;
}
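
/* Illustrative example (editor's sketch): compiling

       extern int n;
       int read_n (void) { return n; }

   with -O2 -Wsuggest-attribute=pure is expected to produce a warning
   of the form "function might be candidate for attribute 'pure'",
   pointing at read_n's location; read_n is public, so the compiler
   cannot always see its body at call sites.  */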

/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static struct pointer_set_t *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static struct pointer_set_t *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

static void
warn_function_noreturn (tree decl)
{
  static struct pointer_set_t *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
			   true, warned_about, "noreturn");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);
  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
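
/* Illustrative example (editor's sketch): given

       static int s;
       extern int g;
       volatile int v;

   a read of g or s demotes the analyzed function from IPA_CONST to
   IPA_PURE, a write to either forces IPA_NEITHER, and any access to v
   forces IPA_NEITHER immediately.  */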


/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
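
/* Illustrative example (editor's sketch): in

       int deref (int *p) { return *p; }

   *p may alias global memory, so the read demotes deref to IPA_PURE;
   a store through p would force IPA_NEITHER.  A dereference of a
   pointer that provably points only to non-escaping local memory is
   harmless and leaves the state alone.  */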

/* Compute the state based on the ECF FLAGS and store it to STATE and
   LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
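
/* Usage sketch (editor's illustration): for a declaration carrying
   __attribute__((pure)), flags_from_decl_or_type yields ECF_PURE, so

       enum pure_const_state_e s;
       bool looping;
       state_from_flags (&s, &looping, ECF_PURE, false);

   leaves s == IPA_PURE and looping == false.  A call that cannot lead
   to a return is downgraded only to pure-looping, since its side
   effects can never be observed by a returning caller.  */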

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   worse of the two variants into STATE and LOOPING.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2)
{
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
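
/* Usage sketch (editor's illustration): the enum is ordered
   IPA_CONST < IPA_PURE < IPA_NEITHER, so merging a pure, possibly
   looping callee into a so-far-const caller

       enum pure_const_state_e s = IPA_CONST;
       bool looping = false;
       worse_state (&s, &looping, IPA_PURE, true);

   leaves s == IPA_PURE and looping == true: a caller can never be in
   a better state than the worst thing it does.  better_state is used
   the other way round, to refine a computed result with what the user
   already declared.  */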

/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	case BUILT_IN_RETURN:
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_EH_POINTER:
	case BUILT_IN_EH_FILTER:
	case BUILT_IN_UNWIND_RESUME:
	case BUILT_IN_CXA_END_CLEANUP:
	case BUILT_IN_EH_COPY_VALUES:
	case BUILT_IN_FRAME_ADDRESS:
	case BUILT_IN_APPLY:
	case BUILT_IN_APPLY_ARGS:
	  *looping = false;
	  *state = IPA_CONST;
	  return true;
	case BUILT_IN_PREFETCH:
	  *looping = true;
	  *state = IPA_CONST;
	  return true;
      }
  return false;
}
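
/* Illustrative consequence (editor's note): __builtin_alloca touches
   only the caller's own frame, so

       int last (int n)
       {
	 int *p = __builtin_alloca (n * sizeof (int));
	 p[0] = n;
	 return p[0];
       }

   can still end up const; the builtin itself is not const, but using
   it does not spoil the caller's purity.  */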

/* Check the parameters of the function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  */

static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto are not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
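
/* Illustrative example (editor's sketch): in local (non-IPA) mode,

       int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   the self-recursive call marks the function looping, so it can at
   best become looping-const; a call to setjmp or longjmp would
   instead force IPA_NEITHER outright.  */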

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    cgraph_node_cannot_return (fn));

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of a possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that the loop
	     will be proved to be finite in testcases such as in loop-15.c
	     and loop-24.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      loop_iterator li;
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (li, loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    FOR_EACH_LOOP_BREAK (li);
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}
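
/* Illustrative example (editor's sketch): the back-edge check means

       int sum (int n)
       {
	 int s = 0;
	 for (int i = 0; i < n; i++)
	   s += i;
	 return s;
       }

   stays const because SCEV can prove the loop finite, whereas

       int spin (void) { for (;;); }

   keeps an unprovable back edge and is marked looping, so it can at
   most become looping-const (CSE-able but not removable as dead).  */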

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
    set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a cgraph node is removed.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    {
      funct_state l = get_function_state (node);
      if (l != &varying_state)
	free (l);
      set_function_state (node, NULL);
    }
}


static void
register_hooks (void)
{
  static bool init_p = false;

  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      cgraph_add_node_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&add_new_function, NULL);
}


/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We cannot use their results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = cgraph (lto_symtab_encoder_deref (encoder, index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s/%i ",
			   node->name (),
			   node->order);
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file, " function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file, " function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file, " function is locally throwing\n");
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}


static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (cgraph_function_node (e->callee, NULL) == node)
      return true;
  return false;
}
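
/* Illustrative example (editor's sketch): f calling f directly is
   caught here, while mutual recursion such as

       int f (int n);
       int g (int n) { return f (n); }
       int f (int n) { return n ? g (n - 1) : 0; }

   forms a two-node strongly connected component, which the propagation
   loop below already treats as possibly looping (count > 1).  */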

/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static void
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For overwritable nodes we cannot assume anything.  */
	  if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
	    {
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Overwritable. state %s looping %i\n",
			   pure_const_names[w_l->state_previously_known],
			   w_l->looping_previously_known);
		}
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Call to %s/%i",
			   e->callee->name (),
			   e->callee->order);
		}
	      if (avail > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && cgraph_edge_cannot_lead_to_return (e))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  cgraph_edge_cannot_lead_to_return (e));

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				cgraph_edge_cannot_lead_to_return (ie));
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* And finally all loads and stores.  */
	  for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ipa_ref_cannot_lead_to_return (ref))
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names[pure_const_state],
		 looping);

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      this_looping |= w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  switch (this_state)
	    {
	    case IPA_CONST:
	      if (!TREE_READONLY (w->decl))
		{
		  warn_function_const (w->decl, !this_looping);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be %sconst: %s\n",
			     this_looping ? "looping " : "",
			     w->name ());
		}
	      cgraph_set_const_flag (w, true, this_looping);
	      break;

	    case IPA_PURE:
	      if (!DECL_PURE_P (w->decl))
		{
		  warn_function_pure (w->decl, !this_looping);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be %spure: %s\n",
			     this_looping ? "looping " : "",
			     w->name ());
		}
	      cgraph_set_pure_flag (w, true, this_looping);
	      break;

	    default:
	      break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
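
/* Worked example (editor's illustration): if A and B call each other
   and each body alone qualifies as const, the SCC {A, B} collapses to
   the worst member state (const), then count > 1 marks the cycle as
   possibly looping, so both end up looping-const; a single write to a
   global in either body would instead drag the whole cycle down to
   "neither".  */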

/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
	{
	  struct cgraph_edge *e, *ie;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_throw
	      || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
	    can_throw = true;

	  if (can_throw)
	    break;

	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);

	      if (avail > AVAIL_OVERWRITABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (can_throw)
		    break;
		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
		can_throw = true;
	    }
	  for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    if (ie->can_throw_external)
	      {
		can_throw = true;
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      cgraph_set_nothrow_flag (w, true);
	      if (dump_file)
		fprintf (dump_file, "Function found to be nothrow: %s\n",
			 w->name ());
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}


/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;

  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return, which improves
     the later analysis.  */
  propagate_nothrow ();
  propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return 0;
}

static bool
gate_pure_const (void)
{
  return (flag_ipa_pure_const
	  /* Don't bother doing anything if the program has errors.  */
	  && !seen_error ());
}

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return propagate (); }

}; // class pass_ipa_pure_const

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if the function should be skipped for local pure-const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations, we must not promote functions that are
     called by already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

static unsigned int
local_pure_const (void)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_get_node (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    {
      warn_function_noreturn (cfun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      cgraph_set_nothrow_flag (node, true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return local_pure_const (); }

}; // class pass_local_pure_const

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

static unsigned int
execute_warn_function_noreturn (void)
{
  if (!TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    warn_function_noreturn (current_function_decl);
  return 0;
}

static bool
gate_warn_function_noreturn (void)
{
  return warn_suggest_attribute_noreturn;
}

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_function_noreturn (); }
  unsigned int execute () { return execute_warn_function_noreturn (); }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}