/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made, since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
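/* As a rough illustration (the functions below are examples only, not
   part of this file): a function that computes only on its arguments is
   a candidate for const, one that additionally reads global memory is a
   candidate for pure, and one that writes global memory is neither.

     int sq (int x)     { return x * x; }     // const candidate
     int g;
     int add_g (int x)  { return x + g; }     // pure candidate
     int bump_g (int x) { return g += x; }    // neither

   The local analysis below computes this classification per function;
   the IPA propagation then refines it across the call graph.  */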
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "fold-const.h"
#include "print-tree.h"
#include "calls.h"
#include "cfganal.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "langhooks.h"
#include "target.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "varasm.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};
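/* Note that the numeric order of the enumerators matters: the merge
   helpers further below (better_state and worse_state) rely on
   IPA_CONST < IPA_PURE < IPA_NEITHER when combining two states.  */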

const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};

/* State used when we know nothing about a function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess whether the function body will always be visible to the
   compiler when compiling a call to it, and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}

/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? _("function might be candidate for attribute %<%s%>")
              : _("function might be candidate for attribute %<%s%>"
                  " if it is known to return normally"), attrib_name);
  return warned_about;
}

/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}

/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

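/* These helpers back the -Wsuggest-attribute=const and
   -Wsuggest-attribute=pure options.  As an illustrative use (the command
   line is only an example), compiling with

     gcc -O2 -Wsuggest-attribute=pure

   may report "function might be candidate for attribute 'pure'" for a
   function that only reads global memory and is known to return.  */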
/* Emit a suggestion about __attribute__((noreturn)) for DECL if the
   language and target allow it.  */

static void
warn_function_noreturn (tree decl)
{
  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
                           true, warned_about, "noreturn");
}

/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);
  funct_state_vec[node->uid] = s;
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Check to see if the use (or definition, when CHECKING_WRITE is true)
   of memory operand T is legal in a function that is either pure or
   const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the better
   of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the worse
   of the two variants into STATE and LOOPING.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2)
{
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

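/* A small worked example of the merge helpers (illustrative values only):
   worse_state of IPA_CONST and IPA_PURE yields IPA_PURE, and merging
   anything with IPA_NEITHER yields IPA_NEITHER; better_state goes the
   other way and is used to refine a computed state with what was
   previously known from the declaration.  */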
/* Recognize special cases of builtins that are by themselves not pure or
   const, but a function using them can still be.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
        case BUILT_IN_RETURN:
        case BUILT_IN_UNREACHABLE:
        case BUILT_IN_ALLOCA:
        case BUILT_IN_ALLOCA_WITH_ALIGN:
        case BUILT_IN_STACK_SAVE:
        case BUILT_IN_STACK_RESTORE:
        case BUILT_IN_EH_POINTER:
        case BUILT_IN_EH_FILTER:
        case BUILT_IN_UNWIND_RESUME:
        case BUILT_IN_CXA_END_CLEANUP:
        case BUILT_IN_EH_COPY_VALUES:
        case BUILT_IN_FRAME_ADDRESS:
        case BUILT_IN_APPLY:
        case BUILT_IN_APPLY_ARGS:
          *looping = false;
          *state = IPA_CONST;
          return true;
        case BUILT_IN_PREFETCH:
          *looping = true;
          *state = IPA_CONST;
          return true;
        default:
          break;
      }
  return false;
}

/* Check the parameters of the call statement CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or has special attributes that may
   also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them; otherwise we will compute our own information.

     Const and pure functions have fewer clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file,
                       "    longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis
     of those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by iterator GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}


/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunks get propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an indication of a
         possible infinite loop side effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve the chances that a
             loop will be proved finite in testcases such as loop-15.c
             and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    can not prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}

/* Called when a new function is inserted into the callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->get_availability () < AVAIL_INTERPOSABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (node->get_availability () > AVAIL_INTERPOSABLE
      && opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}

/* Called when a new clone is inserted into the callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph late; free its state.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    {
      funct_state l = get_function_state (node);
      if (l != &varying_state)
        free (l);
      set_function_state (node, NULL);
    }
}


/* Register the cgraph hooks that keep the function state vector up to
   date across node insertion, duplication and removal.  */

void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}

/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We also process AVAIL_INTERPOSABLE functions.  We cannot use their
     results by default, but the info can be used at LTO with
     -fwhole-program or when a function got cloned and the clone is
     AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_availability () >= AVAIL_INTERPOSABLE
        && opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}


/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        {
          struct bitpack_d bp;
          funct_state fs;
          int node_ref;
          lto_symtab_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}


/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s/%i ",
                           node->name (),
                           node->order);
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n  pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, "  previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file,"  function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file,"  function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file,"  function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file,"  function can locally free\n");
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

/* Edge filter for the nothrow propagation: ignore edges that cannot
   throw externally.  */

static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}

/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we only need to care about self
   recursion here.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove unreachable functions if they are marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
  return false;
}

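/* Illustrative note for the propagation below: mutually recursive
   functions, e.g. a hypothetical f() calling g() and g() calling f(),
   form one strongly connected component in the reduced postorder, so
   they are walked as a single region and end up with the same
   pure/const state.  */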
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = get_function_state (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
                     w->name (),
                     w->order,
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping);
          if (pure_const_state == IPA_NEITHER)
            break;

          /* For overwritable nodes we cannot assume anything.  */
          if (w->get_availability () == AVAIL_INTERPOSABLE)
            {
              worse_state (&pure_const_state, &looping,
                           w_l->state_previously_known,
                           w_l->looping_previously_known);
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file,
                           "    Overwritable. state %s looping %i\n",
                           pure_const_names[w_l->state_previously_known],
                           w_l->looping_previously_known);
                }
              break;
            }

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file,
                           "    Call to %s/%i",
                           e->callee->name (),
                           e->callee->order);
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 "        Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls; ie; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref); i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    global var write\n");
                  break;
                case IPA_REF_ADDR:
                case IPA_REF_CHKP:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = get_function_state (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              this_looping |= w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->global.inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
                                                            NULL, true);
                w->set_const_flag (true, this_looping);
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
                                                            NULL, true);
                w->set_pure_flag (true, this_looping);
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

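/* Similar illustrative note for the nothrow propagation below: a function
   is marked TREE_NOTHROW only when neither its own body nor any call in
   its cycle reachable through a can_throw_external edge may throw; e.g. a
   hypothetical wrapper that only calls a nothrow callee becomes nothrow
   itself.  */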
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;
          funct_state w_l = get_function_state (w);

          if (w_l->can_throw
              || w->get_availability () == AVAIL_INTERPOSABLE)
            can_throw = true;

          for (e = w->callees; e && !can_throw; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail);

              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);

                  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
                      && e->can_throw_external)
                    can_throw = true;
                }
              else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
                can_throw = true;
            }
          for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
            if (ie->can_throw_external)
              can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's nothrow state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->global.inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return, which improves
     the later analysis.  */
  propagate_nothrow ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}

/* Gate shared by the IPA and local pure/const passes.  */

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
                     pure_const_generate_summary, /* generate_summary */
                     pure_const_write_summary, /* write_summary */
                     pure_const_read_summary, /* read_summary */
                     NULL, /* write_optimization_summary */
                     NULL, /* read_optimization_summary */
                     NULL, /* stmt_fixup */
                     0, /* function_transform_todo_flags_start */
                     NULL, /* function_transform */
                     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

5dc16b19 1647/* Return true if function should be skipped for local pure const analysis. */
33977f81 1648
5dc16b19
MLI
1649static bool
1650skip_function_for_local_pure_const (struct cgraph_node *node)
33977f81 1651{
33977f81
JH
1652 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1653 we must not promote functions that are called by already processed functions. */
1654
1655 if (function_called_by_processed_nodes_p ())
1656 {
1657 if (dump_file)
1658 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
5dc16b19 1659 return true;
33977f81 1660 }
d52f5295 1661 if (node->get_availability () <= AVAIL_INTERPOSABLE)
33977f81
JH
1662 {
1663 if (dump_file)
61502ca8 1664 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
5dc16b19 1665 return true;
33977f81 1666 }
1667 return false;
1668}
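/* Illustrative sketch, not part of the original source, of a function
   the check above would typically skip.  The name is made up.  When
   built into a shared library without hidden visibility, "get_answer"
   may be interposed by a different definition at run time, so its body
   alone is not a safe basis for promotion; the analysis still runs so
   that -Wsuggest-attribute warnings can be emitted.  */
#if 0
int get_answer (void)      /* Exported and possibly interposable.  */
{
  return 42;
}
#endif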
1669
1670/* Simple local pass for pure const discovery reusing the analysis from
1671 ipa_pure_const. This pass is effective when executed together with
1672 other optimization passes in the early optimization pass queue. */
33977f81 1673
1674namespace {
1675
1676const pass_data pass_data_local_pure_const =
1677{
1678 GIMPLE_PASS, /* type */
1679 "local-pure-const", /* name */
1680 OPTGROUP_NONE, /* optinfo_flags */
1681 TV_IPA_PURE_CONST, /* tv_id */
1682 0, /* properties_required */
1683 0, /* properties_provided */
1684 0, /* properties_destroyed */
1685 0, /* todo_flags_start */
1686 0, /* todo_flags_finish */
1687};
1688
1689class pass_local_pure_const : public gimple_opt_pass
1690{
1691public:
1692 pass_local_pure_const (gcc::context *ctxt)
1693 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
1694 {}
1695
1696 /* opt_pass methods: */
1697 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
1698 virtual bool gate (function *) { return gate_pure_const (); }
1699 virtual unsigned int execute (function *);
1700
1701}; // class pass_local_pure_const
1702
1703unsigned int
1704pass_local_pure_const::execute (function *fun)
1705{
1706 bool changed = false;
1707 funct_state l;
1708 bool skip;
1709 struct cgraph_node *node;
1710
d52f5295 1711 node = cgraph_node::get (current_function_decl);
1712 skip = skip_function_for_local_pure_const (node);
1713 if (!warn_suggest_attribute_const
1714 && !warn_suggest_attribute_pure
1715 && skip)
1716 return 0;
73add7fe 1717
1718 l = analyze_function (node, false);
1719
1720 /* Do NORETURN discovery. */
73add7fe 1721 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
be55bfe6 1722 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
73add7fe 1723 {
be55bfe6 1724 warn_function_noreturn (fun->decl);
73add7fe 1725 if (dump_file)
1726 fprintf (dump_file, "Function found to be noreturn: %s\n",
1727 current_function_name ());
1728
1729 /* Update declaration and reduce profile to executed once. */
1730 TREE_THIS_VOLATILE (current_function_decl) = 1;
1731 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
be55bfe6 1732 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1733
1734 changed = true;
1735 }
c59f5d1b 1736
1737 switch (l->pure_const_state)
1738 {
1739 case IPA_CONST:
1740 if (!TREE_READONLY (current_function_decl))
1741 {
1742 warn_function_const (current_function_decl, !l->looping);
1743 if (!skip)
1744 {
d52f5295 1745 node->set_const_flag (true, l->looping);
1746 changed = true;
1747 }
1748 if (dump_file)
1749 fprintf (dump_file, "Function found to be %sconst: %s\n",
1750 l->looping ? "looping " : "",
df92c640 1751 current_function_name ());
1752 }
1753 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1754 && !l->looping)
1755 {
1756 if (!skip)
1757 {
d52f5295 1758 node->set_const_flag (true, false);
1759 changed = true;
1760 }
1761 if (dump_file)
1762 fprintf (dump_file, "Function found to be non-looping: %s\n",
df92c640 1763 current_function_name ());
1764 }
1765 break;
1766
1767 case IPA_PURE:
5dc16b19 1768 if (!DECL_PURE_P (current_function_decl))
33977f81 1769 {
1770 if (!skip)
1771 {
d52f5295 1772 node->set_pure_flag (true, l->looping);
1773 changed = true;
1774 }
1775 warn_function_pure (current_function_decl, !l->looping);
1776 if (dump_file)
1777 fprintf (dump_file, "Function found to be %spure: %s\n",
1778 l->looping ? "looping " : "",
df92c640 1779 current_function_name ());
1780 }
1781 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1782 && !l->looping)
1783 {
1784 if (!skip)
1785 {
d52f5295 1786 node->set_pure_flag (true, false);
1787 changed = true;
1788 }
1789 if (dump_file)
1790 fprintf (dump_file, "Function found to be non-looping: %s\n",
df92c640 1791 current_function_name ());
1792 }
1793 break;
1794
1795 default:
1796 break;
1797 }
1798 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1799 {
d52f5295 1800 node->set_nothrow_flag (true);
1801 changed = true;
1802 if (dump_file)
1803 fprintf (dump_file, "Function found to be nothrow: %s\n",
df92c640 1804 current_function_name ());
33977f81 1805 }
04695783 1806 free (l);
1807 if (changed)
1808 return execute_fixup_cfg ();
1809 else
1810 return 0;
1811}
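/* Illustrative sketch, not part of the original source: inputs the
   local pass above is meant to classify.  The names are made up.  With
   -Wsuggest-attribute=const or =pure the matching warnings would also
   be issued for the first two.  */
#if 0
static int counter;

int add (int a, int b)
{
  return a + b;                    /* No memory access -> const.  */
}

int read_counter (void)
{
  return counter;                  /* Only reads memory -> pure.  */
}

void fatal (const char *msg)
{
  __builtin_printf ("%s\n", msg);
  __builtin_abort ();              /* Exit block unreachable -> noreturn.  */
}
#endif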
1812
1813} // anon namespace
1814
1815gimple_opt_pass *
1816make_pass_local_pure_const (gcc::context *ctxt)
1817{
1818 return new pass_local_pure_const (ctxt);
1819}
1820
1821/* Emit noreturn warnings. */
1822
1823namespace {
1824
1825const pass_data pass_data_warn_function_noreturn =
1826{
1827 GIMPLE_PASS, /* type */
1828 "*warn_function_noreturn", /* name */
1829 OPTGROUP_NONE, /* optinfo_flags */
1830 TV_NONE, /* tv_id */
1831 PROP_cfg, /* properties_required */
1832 0, /* properties_provided */
1833 0, /* properties_destroyed */
1834 0, /* todo_flags_start */
1835 0, /* todo_flags_finish */
1836};
1837
1838class pass_warn_function_noreturn : public gimple_opt_pass
1839{
1840public:
1841 pass_warn_function_noreturn (gcc::context *ctxt)
1842 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
1843 {}
1844
1845 /* opt_pass methods: */
1a3d085c 1846 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
1847 virtual unsigned int execute (function *fun)
1848 {
1849 if (!TREE_THIS_VOLATILE (current_function_decl)
1850 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1851 warn_function_noreturn (current_function_decl);
1852 return 0;
1853 }
1854
1855}; // class pass_warn_function_noreturn
1856
1857} // anon namespace
1858
1859gimple_opt_pass *
1860make_pass_warn_function_noreturn (gcc::context *ctxt)
1861{
1862 return new pass_warn_function_noreturn (ctxt);
1863}
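/* Illustrative sketch, not part of the original source: with
   -Wsuggest-attribute=noreturn the pass above warns about a function
   like this one (name made up), whose exit block has no predecessors
   but which is not yet declared noreturn.  */
#if 0
void give_up (void)
{
  __builtin_exit (1);        /* Control never returns to the caller.  */
}
#endif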
1864
1865/* Simple local pass for nothrow discovery.  It runs early, during
1866 lowering, and sets TREE_NOTHROW on functions that contain no
1867 statement that can throw externally. */
1868
1869namespace {
1870
1871const pass_data pass_data_nothrow =
1872{
1873 GIMPLE_PASS, /* type */
1874 "nothrow", /* name */
1875 OPTGROUP_NONE, /* optinfo_flags */
1876 TV_IPA_PURE_CONST, /* tv_id */
1877 0, /* properties_required */
1878 0, /* properties_provided */
1879 0, /* properties_destroyed */
1880 0, /* todo_flags_start */
1881 0, /* todo_flags_finish */
1882};
1883
1884class pass_nothrow : public gimple_opt_pass
1885{
1886public:
1887 pass_nothrow (gcc::context *ctxt)
1888 : gimple_opt_pass (pass_data_nothrow, ctxt)
1889 {}
1890
1891 /* opt_pass methods: */
1892 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
1893 virtual bool gate (function *) { return optimize; }
1894 virtual unsigned int execute (function *);
1895
1896}; // class pass_nothrow
1897
1898unsigned int
1899pass_nothrow::execute (function *)
1900{
1901 struct cgraph_node *node;
1902 basic_block this_block;
1903
1904 if (TREE_NOTHROW (current_function_decl))
1905 return 0;
1906
1907 node = cgraph_node::get (current_function_decl);
1908
1909 /* We run during lowering; we cannot really use availability yet. */
1910 if (cgraph_node::get (current_function_decl)->get_availability ()
1911 <= AVAIL_INTERPOSABLE)
1912 {
1913 if (dump_file)
1914 fprintf (dump_file, "Function is interposable;"
1915 " not analyzing.\n");
1916 return 0;
1917 }
1918
1919 FOR_EACH_BB_FN (this_block, cfun)
1920 {
1921 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1922 !gsi_end_p (gsi);
1923 gsi_next (&gsi))
1924 if (stmt_can_throw_external (gsi_stmt (gsi)))
1925 {
1926 if (is_gimple_call (gsi_stmt (gsi)))
1927 {
1928 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1929 if (callee_t && recursive_call_p (current_function_decl,
1930 callee_t))
1931 continue;
1932 }
1933
1934 if (dump_file)
1935 {
1936 fprintf (dump_file, "Statement can throw: ");
1937 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1938 }
1939 return 0;
1940 }
1941 }
1942
1943 node->set_nothrow_flag (true);
1944 if (dump_file)
1945 fprintf (dump_file, "Function found to be nothrow: %s\n",
1946 current_function_name ());
1947 return 0;
1948}
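/* Illustrative sketch, not part of the original source: what the scan
   above accepts.  The name is made up.  Apart from its recursive calls,
   which the recursive_call_p check ignores, no statement in "fib" can
   throw externally, so the function can be marked nothrow even though
   its own TREE_NOTHROW flag is not yet set while it is being scanned.  */
#if 0
int fib (int n)
{
  if (n < 2)
    return n;
  return fib (n - 1) + fib (n - 2);   /* Self-calls do not block nothrow.  */
}
#endif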
1949
1950} // anon namespace
1951
1952gimple_opt_pass *
1953make_pass_nothrow (gcc::context *ctxt)
1954{
1955 return new pass_nothrow (ctxt);
1956}