1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
39 #include "print-tree.h"
42 #include "gimple-iterator.h"
43 #include "gimple-walk.h"
45 #include "tree-ssa-loop-niter.h"
46 #include "tree-inline.h"
47 #include "tree-pass.h"
48 #include "langhooks.h"
49 #include "pointer-set.h"
51 #include "ipa-utils.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "langhooks.h"
57 #include "lto-streamer.h"
58 #include "data-streamer.h"
59 #include "tree-streamer.h"
61 #include "tree-scalar-evolution.h"
65 static struct pointer_set_t
*visited_nodes
;
67 /* Lattice values for const and pure functions. Everything starts out
68 being const, then may drop to pure and then neither depending on
70 enum pure_const_state_e
77 const char *pure_const_names
[3] = {"const", "pure", "neither"};
79 /* Holder for the const_state. There is one of these per function
84 enum pure_const_state_e pure_const_state
;
85 /* What user set here; we can be always sure about this. */
86 enum pure_const_state_e state_previously_known
;
87 bool looping_previously_known
;
89 /* True if the function could possibly infinite loop. There are a
90 lot of ways that this could be determined. We are pretty
91 conservative here. While it is possible to cse pure and const
92 calls, it is not legal to have dce get rid of the call if there
93 is a possibility that the call could infinite loop since this is
94 a behavioral change. */
100 /* State used when we know nothing about function. */
101 static struct funct_state_d varying_state
102 = { IPA_NEITHER
, IPA_NEITHER
, true, true, true };
105 typedef struct funct_state_d
* funct_state
;
107 /* The storage of the funct_state is abstracted because there is the
108 possibility that it may be desirable to move this to the cgraph
111 /* Array, indexed by cgraph node uid, of function states. */
113 static vec
<funct_state
> funct_state_vec
;
115 /* Holders of ipa cgraph hooks: */
116 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
117 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
118 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
120 /* Try to guess if function body will always be visible to compiler
121 when compiling the call and whether compiler will be able
122 to propagate the information by itself. */
125 function_always_visible_to_compiler_p (tree decl
)
127 return (!TREE_PUBLIC (decl
) || DECL_DECLARED_INLINE_P (decl
));
130 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
131 is true if the function is known to be finite. The diagnostic is
132 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
133 OPTION, this function may initialize it and it is always returned
136 static struct pointer_set_t
*
137 suggest_attribute (int option
, tree decl
, bool known_finite
,
138 struct pointer_set_t
*warned_about
,
139 const char * attrib_name
)
141 if (!option_enabled (option
, &global_options
))
143 if (TREE_THIS_VOLATILE (decl
)
144 || (known_finite
&& function_always_visible_to_compiler_p (decl
)))
148 warned_about
= pointer_set_create ();
149 if (pointer_set_contains (warned_about
, decl
))
151 pointer_set_insert (warned_about
, decl
);
152 warning_at (DECL_SOURCE_LOCATION (decl
),
155 ? _("function might be candidate for attribute %<%s%>")
156 : _("function might be candidate for attribute %<%s%>"
157 " if it is known to return normally"), attrib_name
);
161 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
162 is true if the function is known to be finite. */
165 warn_function_pure (tree decl
, bool known_finite
)
167 static struct pointer_set_t
*warned_about
;
170 = suggest_attribute (OPT_Wsuggest_attribute_pure
, decl
,
171 known_finite
, warned_about
, "pure");
174 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
175 is true if the function is known to be finite. */
178 warn_function_const (tree decl
, bool known_finite
)
180 static struct pointer_set_t
*warned_about
;
182 = suggest_attribute (OPT_Wsuggest_attribute_const
, decl
,
183 known_finite
, warned_about
, "const");
187 warn_function_noreturn (tree decl
)
189 static struct pointer_set_t
*warned_about
;
190 if (!lang_hooks
.missing_noreturn_ok_p (decl
)
191 && targetm
.warn_func_return (decl
))
193 = suggest_attribute (OPT_Wsuggest_attribute_noreturn
, decl
,
194 true, warned_about
, "noreturn");
197 /* Return true if we have a function state for NODE. */
200 has_function_state (struct cgraph_node
*node
)
202 if (!funct_state_vec
.exists ()
203 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
205 return funct_state_vec
[node
->uid
] != NULL
;
208 /* Return the function state from NODE. */
210 static inline funct_state
211 get_function_state (struct cgraph_node
*node
)
213 if (!funct_state_vec
.exists ()
214 || funct_state_vec
.length () <= (unsigned int)node
->uid
215 || !funct_state_vec
[node
->uid
])
216 /* We might want to put correct previously_known state into varying. */
217 return &varying_state
;
218 return funct_state_vec
[node
->uid
];
221 /* Set the function state S for NODE. */
224 set_function_state (struct cgraph_node
*node
, funct_state s
)
226 if (!funct_state_vec
.exists ()
227 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
228 funct_state_vec
.safe_grow_cleared (node
->uid
+ 1);
229 funct_state_vec
[node
->uid
] = s
;
232 /* Check to see if the use (or definition when CHECKING_WRITE is true)
233 variable T is legal in a function that is either pure or const. */
236 check_decl (funct_state local
,
237 tree t
, bool checking_write
, bool ipa
)
239 /* Do not want to do anything with volatile except mark any
240 function that uses one to be not const or pure. */
241 if (TREE_THIS_VOLATILE (t
))
243 local
->pure_const_state
= IPA_NEITHER
;
245 fprintf (dump_file
, " Volatile operand is not const/pure");
249 /* Do not care about a local automatic that is not static. */
250 if (!TREE_STATIC (t
) && !DECL_EXTERNAL (t
))
253 /* If the variable has the "used" attribute, treat it as if it had a
254 been touched by the devil. */
255 if (DECL_PRESERVE_P (t
))
257 local
->pure_const_state
= IPA_NEITHER
;
259 fprintf (dump_file
, " Used static/global variable is not const/pure\n");
263 /* In IPA mode we are not interested in checking actual loads and stores;
264 they will be processed at propagation time using ipa_ref. */
268 /* Since we have dealt with the locals and params cases above, if we
269 are CHECKING_WRITE, this cannot be a pure or constant
273 local
->pure_const_state
= IPA_NEITHER
;
275 fprintf (dump_file
, " static/global memory write is not const/pure\n");
279 if (DECL_EXTERNAL (t
) || TREE_PUBLIC (t
))
281 /* Readonly reads are safe. */
282 if (TREE_READONLY (t
) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t
)))
283 return; /* Read of a constant, do not change the function state. */
287 fprintf (dump_file
, " global memory read is not const\n");
288 /* Just a regular read. */
289 if (local
->pure_const_state
== IPA_CONST
)
290 local
->pure_const_state
= IPA_PURE
;
295 /* Compilation level statics can be read if they are readonly
297 if (TREE_READONLY (t
))
301 fprintf (dump_file
, " static memory read is not const\n");
302 /* Just a regular read. */
303 if (local
->pure_const_state
== IPA_CONST
)
304 local
->pure_const_state
= IPA_PURE
;
309 /* Check to see if the use (or definition when CHECKING_WRITE is true)
310 variable T is legal in a function that is either pure or const. */
313 check_op (funct_state local
, tree t
, bool checking_write
)
315 t
= get_base_address (t
);
316 if (t
&& TREE_THIS_VOLATILE (t
))
318 local
->pure_const_state
= IPA_NEITHER
;
320 fprintf (dump_file
, " Volatile indirect ref is not const/pure\n");
324 && (INDIRECT_REF_P (t
) || TREE_CODE (t
) == MEM_REF
)
325 && TREE_CODE (TREE_OPERAND (t
, 0)) == SSA_NAME
326 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t
, 0)))
329 fprintf (dump_file
, " Indirect ref to local memory is OK\n");
332 else if (checking_write
)
334 local
->pure_const_state
= IPA_NEITHER
;
336 fprintf (dump_file
, " Indirect ref write is not const/pure\n");
342 fprintf (dump_file
, " Indirect ref read is not const\n");
343 if (local
->pure_const_state
== IPA_CONST
)
344 local
->pure_const_state
= IPA_PURE
;
348 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
351 state_from_flags (enum pure_const_state_e
*state
, bool *looping
,
352 int flags
, bool cannot_lead_to_return
)
355 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
358 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
359 fprintf (dump_file
, " looping");
361 if (flags
& ECF_CONST
)
364 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
365 fprintf (dump_file
, " const\n");
367 else if (flags
& ECF_PURE
)
370 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
371 fprintf (dump_file
, " pure\n");
373 else if (cannot_lead_to_return
)
377 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
378 fprintf (dump_file
, " ignoring side effects->pure looping\n");
382 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
383 fprintf (dump_file
, " neither\n");
384 *state
= IPA_NEITHER
;
389 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
390 into STATE and LOOPING better of the two variants.
391 Be sure to merge looping correctly. IPA_NEITHER functions
392 have looping 0 even if they don't have to return. */
395 better_state (enum pure_const_state_e
*state
, bool *looping
,
396 enum pure_const_state_e state2
, bool looping2
)
400 if (*state
== IPA_NEITHER
)
403 *looping
= MIN (*looping
, looping2
);
406 else if (state2
!= IPA_NEITHER
)
407 *looping
= MIN (*looping
, looping2
);
410 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
411 into STATE and LOOPING worse of the two variants. */
414 worse_state (enum pure_const_state_e
*state
, bool *looping
,
415 enum pure_const_state_e state2
, bool looping2
)
417 *state
= MAX (*state
, state2
);
418 *looping
= MAX (*looping
, looping2
);
421 /* Recognize special cases of builtins that are by themselves not pure or const
422 but function using them is. */
424 special_builtin_state (enum pure_const_state_e
*state
, bool *looping
,
427 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
428 switch (DECL_FUNCTION_CODE (callee
))
430 case BUILT_IN_RETURN
:
431 case BUILT_IN_UNREACHABLE
:
432 case BUILT_IN_ALLOCA
:
433 case BUILT_IN_ALLOCA_WITH_ALIGN
:
434 case BUILT_IN_STACK_SAVE
:
435 case BUILT_IN_STACK_RESTORE
:
436 case BUILT_IN_EH_POINTER
:
437 case BUILT_IN_EH_FILTER
:
438 case BUILT_IN_UNWIND_RESUME
:
439 case BUILT_IN_CXA_END_CLEANUP
:
440 case BUILT_IN_EH_COPY_VALUES
:
441 case BUILT_IN_FRAME_ADDRESS
:
443 case BUILT_IN_APPLY_ARGS
:
447 case BUILT_IN_PREFETCH
:
455 /* Check the parameters of a function call to CALL_EXPR to see if
456 there are any references in the parameters that are not allowed for
457 pure or const functions. Also check to see if this is either an
458 indirect call, a call outside the compilation unit, or has special
459 attributes that may also effect the purity. The CALL_EXPR node for
460 the entire call expression. */
463 check_call (funct_state local
, gimple call
, bool ipa
)
465 int flags
= gimple_call_flags (call
);
466 tree callee_t
= gimple_call_fndecl (call
);
467 bool possibly_throws
= stmt_could_throw_p (call
);
468 bool possibly_throws_externally
= (possibly_throws
469 && stmt_can_throw_external (call
));
474 for (i
= 0; i
< gimple_num_ops (call
); i
++)
475 if (gimple_op (call
, i
)
476 && tree_could_throw_p (gimple_op (call
, i
)))
478 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
481 fprintf (dump_file
, " operand can throw; looping\n");
482 local
->looping
= true;
484 if (possibly_throws_externally
)
487 fprintf (dump_file
, " operand can throw externally\n");
488 local
->can_throw
= true;
493 /* The const and pure flags are set by a variety of places in the
494 compiler (including here). If someone has already set the flags
495 for the callee, (such as for some of the builtins) we will use
496 them, otherwise we will compute our own information.
498 Const and pure functions have less clobber effects than other
499 functions so we process these first. Otherwise if it is a call
500 outside the compilation unit or an indirect call we punt. This
501 leaves local calls which will be processed by following the call
505 enum pure_const_state_e call_state
;
508 if (special_builtin_state (&call_state
, &call_looping
, callee_t
))
510 worse_state (&local
->pure_const_state
, &local
->looping
,
511 call_state
, call_looping
);
514 /* When bad things happen to bad functions, they cannot be const
516 if (setjmp_call_p (callee_t
))
519 fprintf (dump_file
, " setjmp is not const/pure\n");
520 local
->looping
= true;
521 local
->pure_const_state
= IPA_NEITHER
;
524 if (DECL_BUILT_IN_CLASS (callee_t
) == BUILT_IN_NORMAL
)
525 switch (DECL_FUNCTION_CODE (callee_t
))
527 case BUILT_IN_LONGJMP
:
528 case BUILT_IN_NONLOCAL_GOTO
:
530 fprintf (dump_file
, " longjmp and nonlocal goto is not const/pure\n");
531 local
->pure_const_state
= IPA_NEITHER
;
532 local
->looping
= true;
539 /* When not in IPA mode, we can still handle self recursion. */
541 && recursive_call_p (current_function_decl
, callee_t
))
544 fprintf (dump_file
, " Recursive call can loop.\n");
545 local
->looping
= true;
547 /* Either callee is unknown or we are doing local analysis.
548 Look to see if there are any bits available for the callee (such as by
549 declaration or because it is builtin) and process solely on the basis of
553 enum pure_const_state_e call_state
;
555 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
558 fprintf (dump_file
, " can throw; looping\n");
559 local
->looping
= true;
561 if (possibly_throws_externally
)
565 fprintf (dump_file
, " can throw externally to lp %i\n",
566 lookup_stmt_eh_lp (call
));
568 fprintf (dump_file
, " callee:%s\n",
569 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t
)));
571 local
->can_throw
= true;
573 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
574 fprintf (dump_file
, " checking flags for call:");
575 state_from_flags (&call_state
, &call_looping
, flags
,
576 ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
577 == (ECF_NORETURN
| ECF_NOTHROW
))
578 || (!flag_exceptions
&& (flags
& ECF_NORETURN
)));
579 worse_state (&local
->pure_const_state
, &local
->looping
,
580 call_state
, call_looping
);
582 /* Direct functions calls are handled by IPA propagation. */
585 /* Wrapper around check_decl for loads in local more. */
588 check_load (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
591 check_decl ((funct_state
)data
, op
, false, false);
593 check_op ((funct_state
)data
, op
, false);
597 /* Wrapper around check_decl for stores in local more. */
600 check_store (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
603 check_decl ((funct_state
)data
, op
, true, false);
605 check_op ((funct_state
)data
, op
, true);
609 /* Wrapper around check_decl for loads in ipa mode. */
612 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
615 check_decl ((funct_state
)data
, op
, false, true);
617 check_op ((funct_state
)data
, op
, false);
621 /* Wrapper around check_decl for stores in ipa mode. */
624 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
627 check_decl ((funct_state
)data
, op
, true, true);
629 check_op ((funct_state
)data
, op
, true);
633 /* Look into pointer pointed to by GSIP and figure out what interesting side
636 check_stmt (gimple_stmt_iterator
*gsip
, funct_state local
, bool ipa
)
638 gimple stmt
= gsi_stmt (*gsip
);
640 if (is_gimple_debug (stmt
))
645 fprintf (dump_file
, " scanning: ");
646 print_gimple_stmt (dump_file
, stmt
, 0, 0);
649 if (gimple_has_volatile_ops (stmt
)
650 && !gimple_clobber_p (stmt
))
652 local
->pure_const_state
= IPA_NEITHER
;
654 fprintf (dump_file
, " Volatile stmt is not const/pure\n");
657 /* Look for loads and stores. */
658 walk_stmt_load_store_ops (stmt
, local
,
659 ipa
? check_ipa_load
: check_load
,
660 ipa
? check_ipa_store
: check_store
);
662 if (gimple_code (stmt
) != GIMPLE_CALL
663 && stmt_could_throw_p (stmt
))
665 if (cfun
->can_throw_non_call_exceptions
)
668 fprintf (dump_file
, " can throw; looping\n");
669 local
->looping
= true;
671 if (stmt_can_throw_external (stmt
))
674 fprintf (dump_file
, " can throw externally\n");
675 local
->can_throw
= true;
679 fprintf (dump_file
, " can throw\n");
681 switch (gimple_code (stmt
))
684 check_call (local
, stmt
, ipa
);
687 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
688 /* Target of long jump. */
691 fprintf (dump_file
, " nonlocal label is not const/pure\n");
692 local
->pure_const_state
= IPA_NEITHER
;
696 if (gimple_asm_clobbers_memory_p (stmt
))
699 fprintf (dump_file
, " memory asm clobber is not const/pure\n");
700 /* Abandon all hope, ye who enter here. */
701 local
->pure_const_state
= IPA_NEITHER
;
703 if (gimple_asm_volatile_p (stmt
))
706 fprintf (dump_file
, " volatile is not const/pure\n");
707 /* Abandon all hope, ye who enter here. */
708 local
->pure_const_state
= IPA_NEITHER
;
709 local
->looping
= true;
718 /* This is the main routine for finding the reference patterns for
719 global variables within a function FN. */
722 analyze_function (struct cgraph_node
*fn
, bool ipa
)
724 tree decl
= fn
->decl
;
726 basic_block this_block
;
728 l
= XCNEW (struct funct_state_d
);
729 l
->pure_const_state
= IPA_CONST
;
730 l
->state_previously_known
= IPA_NEITHER
;
731 l
->looping_previously_known
= true;
733 l
->can_throw
= false;
734 state_from_flags (&l
->state_previously_known
, &l
->looping_previously_known
,
735 flags_from_decl_or_type (fn
->decl
),
736 cgraph_node_cannot_return (fn
));
738 if (fn
->thunk
.thunk_p
|| fn
->alias
)
740 /* Thunk gets propagated through, so nothing interesting happens. */
747 fprintf (dump_file
, "\n\n local analysis of %s\n ",
751 push_cfun (DECL_STRUCT_FUNCTION (decl
));
753 FOR_EACH_BB (this_block
)
755 gimple_stmt_iterator gsi
;
756 struct walk_stmt_info wi
;
758 memset (&wi
, 0, sizeof (wi
));
759 for (gsi
= gsi_start_bb (this_block
);
763 check_stmt (&gsi
, l
, ipa
);
764 if (l
->pure_const_state
== IPA_NEITHER
&& l
->looping
&& l
->can_throw
)
770 if (l
->pure_const_state
!= IPA_NEITHER
)
772 /* Const functions cannot have back edges (an
773 indication of possible infinite loop side
775 if (mark_dfs_back_edges ())
777 /* Preheaders are needed for SCEV to work.
778 Simple latches and recorded exits improve chances that loop will
779 proved to be finite in testcases such as in loop-15.c
781 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
782 | LOOPS_HAVE_SIMPLE_LATCHES
783 | LOOPS_HAVE_RECORDED_EXITS
);
784 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
785 flow_loops_dump (dump_file
, NULL
, 0);
786 if (mark_irreducible_loops ())
789 fprintf (dump_file
, " has irreducible loops\n");
797 FOR_EACH_LOOP (li
, loop
, 0)
798 if (!finite_loop_p (loop
))
801 fprintf (dump_file
, " can not prove finiteness of "
802 "loop %i\n", loop
->num
);
804 FOR_EACH_LOOP_BREAK (li
);
808 loop_optimizer_finalize ();
812 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
813 fprintf (dump_file
, " checking previously known:");
815 better_state (&l
->pure_const_state
, &l
->looping
,
816 l
->state_previously_known
,
817 l
->looping_previously_known
);
818 if (TREE_NOTHROW (decl
))
819 l
->can_throw
= false;
825 fprintf (dump_file
, "Function is locally looping.\n");
827 fprintf (dump_file
, "Function is locally throwing.\n");
828 if (l
->pure_const_state
== IPA_CONST
)
829 fprintf (dump_file
, "Function is locally const.\n");
830 if (l
->pure_const_state
== IPA_PURE
)
831 fprintf (dump_file
, "Function is locally pure.\n");
836 /* Called when new function is inserted to callgraph late. */
838 add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
840 if (cgraph_function_body_availability (node
) < AVAIL_OVERWRITABLE
)
842 /* There are some shared nodes, in particular the initializers on
843 static declarations. We do not need to scan them more than once
844 since all we would be interested in are the addressof
846 visited_nodes
= pointer_set_create ();
847 if (cgraph_function_body_availability (node
) > AVAIL_OVERWRITABLE
)
848 set_function_state (node
, analyze_function (node
, true));
849 pointer_set_destroy (visited_nodes
);
850 visited_nodes
= NULL
;
853 /* Called when new clone is inserted to callgraph late. */
856 duplicate_node_data (struct cgraph_node
*src
, struct cgraph_node
*dst
,
857 void *data ATTRIBUTE_UNUSED
)
859 if (has_function_state (src
))
861 funct_state l
= XNEW (struct funct_state_d
);
862 gcc_assert (!has_function_state (dst
));
863 memcpy (l
, get_function_state (src
), sizeof (*l
));
864 set_function_state (dst
, l
);
868 /* Called when new clone is inserted to callgraph late. */
871 remove_node_data (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
873 if (has_function_state (node
))
875 funct_state l
= get_function_state (node
);
876 if (l
!= &varying_state
)
878 set_function_state (node
, NULL
);
884 register_hooks (void)
886 static bool init_p
= false;
893 node_removal_hook_holder
=
894 cgraph_add_node_removal_hook (&remove_node_data
, NULL
);
895 node_duplication_hook_holder
=
896 cgraph_add_node_duplication_hook (&duplicate_node_data
, NULL
);
897 function_insertion_hook_holder
=
898 cgraph_add_function_insertion_hook (&add_new_function
, NULL
);
902 /* Analyze each function in the cgraph to see if it is locally PURE or
906 pure_const_generate_summary (void)
908 struct cgraph_node
*node
;
912 /* There are some shared nodes, in particular the initializers on
913 static declarations. We do not need to scan them more than once
914 since all we would be interested in are the addressof
916 visited_nodes
= pointer_set_create ();
918 /* Process all of the functions.
920 We process AVAIL_OVERWRITABLE functions. We can not use the results
921 by default, but the info can be used at LTO with -fwhole-program or
922 when function got cloned and the clone is AVAILABLE. */
924 FOR_EACH_DEFINED_FUNCTION (node
)
925 if (cgraph_function_body_availability (node
) >= AVAIL_OVERWRITABLE
)
926 set_function_state (node
, analyze_function (node
, true));
928 pointer_set_destroy (visited_nodes
);
929 visited_nodes
= NULL
;
933 /* Serialize the ipa info for lto. */
936 pure_const_write_summary (void)
938 struct cgraph_node
*node
;
939 struct lto_simple_output_block
*ob
940 = lto_create_simple_output_block (LTO_section_ipa_pure_const
);
941 unsigned int count
= 0;
942 lto_symtab_encoder_iterator lsei
;
943 lto_symtab_encoder_t encoder
;
945 encoder
= lto_get_out_decl_state ()->symtab_node_encoder
;
947 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
948 lsei_next_function_in_partition (&lsei
))
950 node
= lsei_cgraph_node (lsei
);
951 if (node
->definition
&& has_function_state (node
))
955 streamer_write_uhwi_stream (ob
->main_stream
, count
);
957 /* Process all of the functions. */
958 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
959 lsei_next_function_in_partition (&lsei
))
961 node
= lsei_cgraph_node (lsei
);
962 if (node
->definition
&& has_function_state (node
))
967 lto_symtab_encoder_t encoder
;
969 fs
= get_function_state (node
);
971 encoder
= ob
->decl_state
->symtab_node_encoder
;
972 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
973 streamer_write_uhwi_stream (ob
->main_stream
, node_ref
);
975 /* Note that flags will need to be read in the opposite
976 order as we are pushing the bitflags into FLAGS. */
977 bp
= bitpack_create (ob
->main_stream
);
978 bp_pack_value (&bp
, fs
->pure_const_state
, 2);
979 bp_pack_value (&bp
, fs
->state_previously_known
, 2);
980 bp_pack_value (&bp
, fs
->looping_previously_known
, 1);
981 bp_pack_value (&bp
, fs
->looping
, 1);
982 bp_pack_value (&bp
, fs
->can_throw
, 1);
983 streamer_write_bitpack (&bp
);
987 lto_destroy_simple_output_block (ob
);
991 /* Deserialize the ipa info for lto. */
994 pure_const_read_summary (void)
996 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
997 struct lto_file_decl_data
*file_data
;
1001 while ((file_data
= file_data_vec
[j
++]))
1005 struct lto_input_block
*ib
1006 = lto_create_simple_input_block (file_data
,
1007 LTO_section_ipa_pure_const
,
1012 unsigned int count
= streamer_read_uhwi (ib
);
1014 for (i
= 0; i
< count
; i
++)
1017 struct cgraph_node
*node
;
1018 struct bitpack_d bp
;
1020 lto_symtab_encoder_t encoder
;
1022 fs
= XCNEW (struct funct_state_d
);
1023 index
= streamer_read_uhwi (ib
);
1024 encoder
= file_data
->symtab_node_encoder
;
1025 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
1026 set_function_state (node
, fs
);
1028 /* Note that the flags must be read in the opposite
1029 order in which they were written (the bitflags were
1030 pushed into FLAGS). */
1031 bp
= streamer_read_bitpack (ib
);
1032 fs
->pure_const_state
1033 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1034 fs
->state_previously_known
1035 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1036 fs
->looping_previously_known
= bp_unpack_value (&bp
, 1);
1037 fs
->looping
= bp_unpack_value (&bp
, 1);
1038 fs
->can_throw
= bp_unpack_value (&bp
, 1);
1041 int flags
= flags_from_decl_or_type (node
->decl
);
1042 fprintf (dump_file
, "Read info for %s/%i ",
1045 if (flags
& ECF_CONST
)
1046 fprintf (dump_file
, " const");
1047 if (flags
& ECF_PURE
)
1048 fprintf (dump_file
, " pure");
1049 if (flags
& ECF_NOTHROW
)
1050 fprintf (dump_file
, " nothrow");
1051 fprintf (dump_file
, "\n pure const state: %s\n",
1052 pure_const_names
[fs
->pure_const_state
]);
1053 fprintf (dump_file
, " previously known state: %s\n",
1054 pure_const_names
[fs
->looping_previously_known
]);
1056 fprintf (dump_file
," function is locally looping\n");
1057 if (fs
->looping_previously_known
)
1058 fprintf (dump_file
," function is previously known looping\n");
1060 fprintf (dump_file
," function is locally throwing\n");
1064 lto_destroy_simple_input_block (file_data
,
1065 LTO_section_ipa_pure_const
,
1073 ignore_edge (struct cgraph_edge
*e
)
1075 return (!e
->can_throw_external
);
1078 /* Return true if NODE is self recursive function.
1079 Indirectly recursive functions appears as non-trivial strongly
1080 connected components, so we need to care about self recursion
1084 self_recursive_p (struct cgraph_node
*node
)
1086 struct cgraph_edge
*e
;
1087 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1088 if (cgraph_function_node (e
->callee
, NULL
) == node
)
1093 /* Produce transitive closure over the callgraph and compute pure/const
1097 propagate_pure_const (void)
1099 struct cgraph_node
*node
;
1100 struct cgraph_node
*w
;
1101 struct cgraph_node
**order
=
1102 XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1105 struct ipa_dfs_info
* w_info
;
1107 order_pos
= ipa_reduced_postorder (order
, true, false, NULL
);
1110 dump_cgraph (dump_file
);
1111 ipa_print_order (dump_file
, "reduced", order
, order_pos
);
1114 /* Propagate the local information through the call graph to produce
1115 the global information. All the nodes within a cycle will have
1116 the same info so we collapse cycles first. Then we can do the
1117 propagation in one pass from the leaves to the roots. */
1118 for (i
= 0; i
< order_pos
; i
++ )
1120 enum pure_const_state_e pure_const_state
= IPA_CONST
;
1121 bool looping
= false;
1128 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1129 fprintf (dump_file
, "Starting cycle\n");
1131 /* Find the worst state for any node in the cycle. */
1133 while (w
&& pure_const_state
!= IPA_NEITHER
)
1135 struct cgraph_edge
*e
;
1136 struct cgraph_edge
*ie
;
1138 struct ipa_ref
*ref
;
1140 funct_state w_l
= get_function_state (w
);
1141 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1142 fprintf (dump_file
, " Visiting %s/%i state:%s looping %i\n",
1145 pure_const_names
[w_l
->pure_const_state
],
1148 /* First merge in function body properties. */
1149 worse_state (&pure_const_state
, &looping
,
1150 w_l
->pure_const_state
, w_l
->looping
);
1151 if (pure_const_state
== IPA_NEITHER
)
1154 /* For overwritable nodes we can not assume anything. */
1155 if (cgraph_function_body_availability (w
) == AVAIL_OVERWRITABLE
)
1157 worse_state (&pure_const_state
, &looping
,
1158 w_l
->state_previously_known
,
1159 w_l
->looping_previously_known
);
1160 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1163 " Overwritable. state %s looping %i\n",
1164 pure_const_names
[w_l
->state_previously_known
],
1165 w_l
->looping_previously_known
);
1172 /* We consider recursive cycles as possibly infinite.
1173 This might be relaxed since infinite recursion leads to stack
1178 /* Now walk the edges and merge in callee properties. */
1179 for (e
= w
->callees
; e
; e
= e
->next_callee
)
1181 enum availability avail
;
1182 struct cgraph_node
*y
= cgraph_function_node (e
->callee
, &avail
);
1183 enum pure_const_state_e edge_state
= IPA_CONST
;
1184 bool edge_looping
= false;
1186 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1193 if (avail
> AVAIL_OVERWRITABLE
)
1195 funct_state y_l
= get_function_state (y
);
1196 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1199 " state:%s looping:%i\n",
1200 pure_const_names
[y_l
->pure_const_state
],
1203 if (y_l
->pure_const_state
> IPA_PURE
1204 && cgraph_edge_cannot_lead_to_return (e
))
1206 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1208 " Ignoring side effects"
1209 " -> pure, looping\n");
1210 edge_state
= IPA_PURE
;
1211 edge_looping
= true;
1215 edge_state
= y_l
->pure_const_state
;
1216 edge_looping
= y_l
->looping
;
1219 else if (special_builtin_state (&edge_state
, &edge_looping
,
1223 state_from_flags (&edge_state
, &edge_looping
,
1224 flags_from_decl_or_type (y
->decl
),
1225 cgraph_edge_cannot_lead_to_return (e
));
1227 /* Merge the results with what we already know. */
1228 better_state (&edge_state
, &edge_looping
,
1229 w_l
->state_previously_known
,
1230 w_l
->looping_previously_known
);
1231 worse_state (&pure_const_state
, &looping
,
1232 edge_state
, edge_looping
);
1233 if (pure_const_state
== IPA_NEITHER
)
1236 if (pure_const_state
== IPA_NEITHER
)
1239 /* Now process the indirect call. */
1240 for (ie
= w
->indirect_calls
; ie
; ie
= ie
->next_callee
)
1242 enum pure_const_state_e edge_state
= IPA_CONST
;
1243 bool edge_looping
= false;
1245 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1246 fprintf (dump_file
, " Indirect call");
1247 state_from_flags (&edge_state
, &edge_looping
,
1248 ie
->indirect_info
->ecf_flags
,
1249 cgraph_edge_cannot_lead_to_return (ie
));
1250 /* Merge the results with what we already know. */
1251 better_state (&edge_state
, &edge_looping
,
1252 w_l
->state_previously_known
,
1253 w_l
->looping_previously_known
);
1254 worse_state (&pure_const_state
, &looping
,
1255 edge_state
, edge_looping
);
1256 if (pure_const_state
== IPA_NEITHER
)
1259 if (pure_const_state
== IPA_NEITHER
)
1262 /* And finally all loads and stores. */
1263 for (i
= 0; ipa_ref_list_reference_iterate (&w
->ref_list
, i
, ref
); i
++)
1265 enum pure_const_state_e ref_state
= IPA_CONST
;
1266 bool ref_looping
= false;
1270 /* readonly reads are safe. */
1271 if (TREE_READONLY (ipa_ref_varpool_node (ref
)->decl
))
1273 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1274 fprintf (dump_file
, " nonreadonly global var read\n");
1275 ref_state
= IPA_PURE
;
1278 if (ipa_ref_cannot_lead_to_return (ref
))
1280 ref_state
= IPA_NEITHER
;
1281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1282 fprintf (dump_file
, " global var write\n");
1287 better_state (&ref_state
, &ref_looping
,
1288 w_l
->state_previously_known
,
1289 w_l
->looping_previously_known
);
1290 worse_state (&pure_const_state
, &looping
,
1291 ref_state
, ref_looping
);
1292 if (pure_const_state
== IPA_NEITHER
)
1295 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1296 w
= w_info
->next_cycle
;
1298 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1299 fprintf (dump_file
, "Result %s looping %i\n",
1300 pure_const_names
[pure_const_state
],
1303 /* Copy back the region's pure_const_state which is shared by
1304 all nodes in the region. */
1308 funct_state w_l
= get_function_state (w
);
1309 enum pure_const_state_e this_state
= pure_const_state
;
1310 bool this_looping
= looping
;
1312 if (w_l
->state_previously_known
!= IPA_NEITHER
1313 && this_state
> w_l
->state_previously_known
)
1315 this_state
= w_l
->state_previously_known
;
1316 this_looping
|= w_l
->looping_previously_known
;
1318 if (!this_looping
&& self_recursive_p (w
))
1319 this_looping
= true;
1320 if (!w_l
->looping_previously_known
)
1321 this_looping
= false;
1323 /* All nodes within a cycle share the same info. */
1324 w_l
->pure_const_state
= this_state
;
1325 w_l
->looping
= this_looping
;
1330 if (!TREE_READONLY (w
->decl
))
1332 warn_function_const (w
->decl
, !this_looping
);
1334 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1335 this_looping
? "looping " : "",
1338 cgraph_set_const_flag (w
, true, this_looping
);
1342 if (!DECL_PURE_P (w
->decl
))
1344 warn_function_pure (w
->decl
, !this_looping
);
1346 fprintf (dump_file
, "Function found to be %spure: %s\n",
1347 this_looping
? "looping " : "",
1350 cgraph_set_pure_flag (w
, true, this_looping
);
1356 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1357 w
= w_info
->next_cycle
;
1361 ipa_free_postorder_info ();
1365 /* Produce transitive closure over the callgraph and compute nothrow
/* NOTE(review): this text is a line-shredded extraction of the original
   source.  The leading numbers are the original file's line numbers; gaps
   in them (e.g. 1366-1368 just below) mark lines the extraction dropped.
   The comments added here describe only what the visible fragments show;
   verify against the upstream ipa-pure-const.c before relying on them.  */
1369 propagate_nothrow (void)
/* Cursors over the callgraph plus a reduced (cycle-collapsed) postorder
   of it, allocated below with XCNEWVEC.  */
1371 struct cgraph_node
*node
;
1372 struct cgraph_node
*w
;
1373 struct cgraph_node
**order
=
1374 XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1377 struct ipa_dfs_info
* w_info
;
/* Collapse strongly connected regions first: all nodes in one cycle
   must end up with identical nothrow information.  */
1379 order_pos
= ipa_reduced_postorder (order
, true, false, ignore_edge
);
1382 dump_cgraph (dump_file
);
1383 ipa_print_order (dump_file
, "reduced for nothrow", order
, order_pos
);
1386 /* Propagate the local information through the call graph to produce
1387 the global information.  All the nodes within a cycle will have
1388 the same info so we collapse cycles first.  Then we can do the
1389 propagation in one pass from the leaves to the roots.  */
1390 for (i
= 0; i
< order_pos
; i
++ )
1392 bool can_throw
= false;
1398 /* Find the worst state for any node in the cycle.  */
1402 struct cgraph_edge
*e
, *ie
;
1403 funct_state w_l
= get_function_state (w
);
/* An overwritable body cannot be trusted to stay nothrow
   (a replacement at link time may throw).  */
1406 || cgraph_function_body_availability (w
) == AVAIL_OVERWRITABLE
)
/* Merge in the throw state of every direct callee.  */
1412 for (e
= w
->callees
; e
; e
= e
->next_callee
)
1414 enum availability avail
;
1415 struct cgraph_node
*y
= cgraph_function_node (e
->callee
, &avail
);
1417 if (avail
> AVAIL_OVERWRITABLE
)
1419 funct_state y_l
= get_function_state (y
);
/* A throwing callee matters only when the edge can propagate the
   exception externally and we are not already declared nothrow.  */
1423 if (y_l
->can_throw
&& !TREE_NOTHROW (w
->decl
)
1424 && e
->can_throw_external
)
1427 else if (e
->can_throw_external
&& !TREE_NOTHROW (y
->decl
))
/* Indirect calls: any edge that can throw externally taints the
   whole region.  */
1430 for (ie
= node
->indirect_calls
; ie
; ie
= ie
->next_callee
)
1431 if (ie
->can_throw_external
)
/* Advance to the next member of this cycle via the DFS info
   chained off w->aux.  */
1436 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1437 w
= w_info
->next_cycle
;
1440 /* Copy back the region's pure_const_state which is shared by
1441 all nodes in the region.  */
1445 funct_state w_l
= get_function_state (w
);
/* Promote to nothrow only if nothing in the region can throw and the
   decl is not already marked; otherwise record can_throw.  */
1446 if (!can_throw
&& !TREE_NOTHROW (w
->decl
))
1448 cgraph_set_nothrow_flag (w
, true);
1450 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1453 else if (can_throw
&& !TREE_NOTHROW (w
->decl
))
1454 w_l
->can_throw
= true;
1455 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1456 w
= w_info
->next_cycle
;
1460 ipa_free_postorder_info ();
1465 /* Produce the global information by performing a transitive closure
1466 on the local information that was produced by generate_summary.  */
/* NOTE(review): the extraction dropped the function header here (original
   lines 1467-1470); this is the IPA pass's propagation entry point.  */
1471 struct cgraph_node
*node
;
/* Unregister the callgraph hooks before propagating; the per-function
   summaries are no longer maintained past this point.  */
1473 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
1474 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
1475 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
/* Nothrow first: it lets more edges be proven not to lead to a return,
   which the pure/const propagation then exploits.  */
1477 /* Nothrow makes more functions not lead to return and improves
1479 propagate_nothrow ();
1480 propagate_pure_const ();
/* Release the per-function summary storage.  */
1483 FOR_EACH_FUNCTION (node
)
1484 if (has_function_state (node
))
1485 free (get_function_state (node
));
1486 funct_state_vec
.release ();
/* Gate for the pure/const passes: run only when -fipa-pure-const is
   enabled.  NOTE(review): extraction dropped the trailing lines of this
   function (original 1495-1497); the visible comment suggests an
   additional error-count check followed — confirm upstream.  */
1491 gate_pure_const (void)
1493 return (flag_ipa_pure_const
1494 /* Don't bother doing anything if the program has errors.  */
/* Pass descriptor for the interprocedural pure/const pass ("pure-const"),
   consumed by the pass manager.  */
1500 const pass_data pass_data_ipa_pure_const
=
1502 IPA_PASS
, /* type */
1503 "pure-const", /* name */
1504 OPTGROUP_NONE
, /* optinfo_flags */
1505 true, /* has_gate */
1506 true, /* has_execute */
1507 TV_IPA_PURE_CONST
, /* tv_id */
1508 0, /* properties_required */
1509 0, /* properties_provided */
1510 0, /* properties_destroyed */
1511 0, /* todo_flags_start */
1512 0, /* todo_flags_finish */
/* IPA pass object: wires the summary generation/streaming callbacks into
   the ipa_opt_pass_d framework; gate and execute delegate to the
   file-local gate_pure_const () and propagate ().  */
1515 class pass_ipa_pure_const
: public ipa_opt_pass_d
1518 pass_ipa_pure_const (gcc::context
*ctxt
)
1519 : ipa_opt_pass_d (pass_data_ipa_pure_const
, ctxt
,
1520 pure_const_generate_summary
, /* generate_summary */
1521 pure_const_write_summary
, /* write_summary */
1522 pure_const_read_summary
, /* read_summary */
1523 NULL
, /* write_optimization_summary */
1524 NULL
, /* read_optimization_summary */
1525 NULL
, /* stmt_fixup */
1526 0, /* function_transform_todo_flags_start */
1527 NULL
, /* function_transform */
1528 NULL
) /* variable_transform */
1531 /* opt_pass methods: */
1532 bool gate () { return gate_pure_const (); }
1533 unsigned int execute () { return propagate (); }
1535 }; // class pass_ipa_pure_const
/* Factory used by the pass manager to instantiate the IPA pure/const
   pass.  NOTE(review): the return-type line was dropped by the
   extraction (original line 1539).  */
1540 make_pass_ipa_pure_const (gcc::context
*ctxt
)
1542 return new pass_ipa_pure_const (ctxt
);
1545 /* Return true if function should be skipped for local pure const analysis.  */
/* NOTE(review): the return statements of both branches were dropped by
   the extraction (gaps after original lines 1556 and 1562); the dump
   messages indicate each branch skips the function.  */
1548 skip_function_for_local_pure_const (struct cgraph_node
*node
)
1550 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1551 we must not promote functions that are called by already processed functions.  */
1553 if (function_called_by_processed_nodes_p ())
1556 fprintf (dump_file
, "Function called in recursive cycle; ignoring\n");
/* A body that may be overwritten (or is unavailable) cannot be given
   stronger attributes locally.  */
1559 if (cgraph_function_body_availability (node
) <= AVAIL_OVERWRITABLE
)
1562 fprintf (dump_file
, "Function is not available or overwritable; not analyzing.\n");
1568 /* Simple local pass for pure const discovery reusing the analysis from
1569 ipa_pure_const.  This pass is effective when executed together with
1570 other optimization passes in early optimization pass queue.  */
/* NOTE(review): line-shredded extraction; embedded numbers are original
   line numbers and gaps mark dropped lines (e.g. the function's return
   type at 1571-1572).  Comments describe only the visible fragments.  */
1573 local_pure_const (void)
1575 bool changed
= false;
1578 struct cgraph_node
*node
;
1580 node
= cgraph_get_node (current_function_decl
);
1581 skip
= skip_function_for_local_pure_const (node
);
/* Without either -Wsuggest-attribute warning there is presumably an
   early-out here — the continuation (original lines 1584-1586) was
   dropped; confirm upstream.  */
1582 if (!warn_suggest_attribute_const
1583 && !warn_suggest_attribute_pure
/* Reuse the IPA analysis on just this function (ipa == false).  */
1587 l
= analyze_function (node
, false);
1589 /* Do NORETURN discovery.  */
/* A function whose exit block has no predecessors never returns.  */
1590 if (!skip
&& !TREE_THIS_VOLATILE (current_function_decl
)
1591 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) == 0)
1593 warn_function_noreturn (cfun
->decl
);
1595 fprintf (dump_file
, "Function found to be noreturn: %s\n",
1596 current_function_name ());
1598 /* Update declaration and reduce profile to executed once.  */
1599 TREE_THIS_VOLATILE (current_function_decl
) = 1;
1600 if (node
->frequency
> NODE_FREQUENCY_EXECUTED_ONCE
)
1601 node
->frequency
= NODE_FREQUENCY_EXECUTED_ONCE
;
/* Promote the decl according to the computed lattice state.  */
1606 switch (l
->pure_const_state
)
/* Const case: warn if requested, then set the flag.  */
1609 if (!TREE_READONLY (current_function_decl
))
1611 warn_function_const (current_function_decl
, !l
->looping
);
1614 cgraph_set_const_flag (node
, true, l
->looping
);
1618 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1619 l
->looping
? "looping " : "",
1620 current_function_name ());
/* Already const: possibly drop the "looping" variant.  */
1622 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
1627 cgraph_set_const_flag (node
, true, false);
1631 fprintf (dump_file
, "Function found to be non-looping: %s\n",
1632 current_function_name ());
/* Pure case, parallel to the const handling above.  */
1637 if (!DECL_PURE_P (current_function_decl
))
1641 cgraph_set_pure_flag (node
, true, l
->looping
);
1644 warn_function_pure (current_function_decl
, !l
->looping
);
1646 fprintf (dump_file
, "Function found to be %spure: %s\n",
1647 l
->looping
? "looping " : "",
1648 current_function_name ());
1650 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
1655 cgraph_set_pure_flag (node
, true, false);
1659 fprintf (dump_file
, "Function found to be non-looping: %s\n",
1660 current_function_name ());
/* Finally, nothrow promotion from the same analysis.  */
1667 if (!l
->can_throw
&& !TREE_NOTHROW (current_function_decl
))
1669 cgraph_set_nothrow_flag (node
, true);
1672 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1673 current_function_name ());
1677 return execute_fixup_cfg ();
/* Pass descriptor for the GIMPLE-level local pure/const pass
   ("local-pure-const"), consumed by the pass manager.  */
1684 const pass_data pass_data_local_pure_const
=
1686 GIMPLE_PASS
, /* type */
1687 "local-pure-const", /* name */
1688 OPTGROUP_NONE
, /* optinfo_flags */
1689 true, /* has_gate */
1690 true, /* has_execute */
1691 TV_IPA_PURE_CONST
, /* tv_id */
1692 0, /* properties_required */
1693 0, /* properties_provided */
1694 0, /* properties_destroyed */
1695 0, /* todo_flags_start */
1696 0, /* todo_flags_finish */
/* GIMPLE pass object for the local pure/const discovery; clone () allows
   the pass to appear multiple times in the pass queue.  Gate and execute
   delegate to the file-local gate_pure_const () and local_pure_const ().  */
1699 class pass_local_pure_const
: public gimple_opt_pass
1702 pass_local_pure_const (gcc::context
*ctxt
)
1703 : gimple_opt_pass (pass_data_local_pure_const
, ctxt
)
1706 /* opt_pass methods: */
1707 opt_pass
* clone () { return new pass_local_pure_const (m_ctxt
); }
1708 bool gate () { return gate_pure_const (); }
1709 unsigned int execute () { return local_pure_const (); }
1711 }; // class pass_local_pure_const
/* Factory used by the pass manager to instantiate the local pure/const
   pass.  NOTE(review): the return-type line was dropped by the
   extraction (original line 1715).  */
1716 make_pass_local_pure_const (gcc::context
*ctxt
)
1718 return new pass_local_pure_const (ctxt
);
1721 /* Emit noreturn warnings.  */
/* Warn when the current function is not already marked noreturn
   (TREE_THIS_VOLATILE) but its exit block is unreachable.  */
1724 execute_warn_function_noreturn (void)
1726 if (!TREE_THIS_VOLATILE (current_function_decl
)
1727 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) == 0)
1728 warn_function_noreturn (current_function_decl
);
/* Gate: run the noreturn-warning pass only under
   -Wsuggest-attribute=noreturn.  */
1733 gate_warn_function_noreturn (void)
1735 return warn_suggest_attribute_noreturn
;
/* Pass descriptor for the noreturn-warning pass; the leading '*' in the
   name suppresses it from -fdump listings.  Requires a CFG.  */
1740 const pass_data pass_data_warn_function_noreturn
=
1742 GIMPLE_PASS
, /* type */
1743 "*warn_function_noreturn", /* name */
1744 OPTGROUP_NONE
, /* optinfo_flags */
1745 true, /* has_gate */
1746 true, /* has_execute */
1747 TV_NONE
, /* tv_id */
1748 PROP_cfg
, /* properties_required */
1749 0, /* properties_provided */
1750 0, /* properties_destroyed */
1751 0, /* todo_flags_start */
1752 0, /* todo_flags_finish */
/* GIMPLE pass object wrapping the noreturn-warning gate/execute
   functions defined above.  */
1755 class pass_warn_function_noreturn
: public gimple_opt_pass
1758 pass_warn_function_noreturn (gcc::context
*ctxt
)
1759 : gimple_opt_pass (pass_data_warn_function_noreturn
, ctxt
)
1762 /* opt_pass methods: */
1763 bool gate () { return gate_warn_function_noreturn (); }
1764 unsigned int execute () { return execute_warn_function_noreturn (); }
1766 }; // class pass_warn_function_noreturn
/* Factory used by the pass manager to instantiate the noreturn-warning
   pass.  NOTE(review): the return-type line was dropped by the
   extraction (original line 1770).  */
1771 make_pass_warn_function_noreturn (gcc::context
*ctxt
)
1773 return new pass_warn_function_noreturn (ctxt
);