/* Source: gcc/ipa-pure-const.c, taken from the git.ipfire.org blame view
   of the thirdparty/gcc.git mirror (commit "Update copyright years").
   Blame-column residue may remain on individual lines below.  */
f7d118a9 1/* Callgraph based analysis of static variables.
f1717362 2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
9ef16211 37#include "backend.h"
7c29e30e 38#include "target.h"
f7d118a9 39#include "tree.h"
9ef16211 40#include "gimple.h"
7c29e30e 41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
9ed99284 45#include "calls.h"
94ea8568 46#include "cfganal.h"
bc61cadb 47#include "tree-eh.h"
dcf1a1ec 48#include "gimple-iterator.h"
49#include "gimple-walk.h"
073c1fd5 50#include "tree-cfg.h"
05d9c18a 51#include "tree-ssa-loop-niter.h"
f7d118a9 52#include "langhooks.h"
f7d118a9 53#include "ipa-utils.h"
ce084dfc 54#include "gimple-pretty-print.h"
c9263b6a 55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
2c06958d 57#include "intl.h"
58#include "opts.h"
f7d118a9 59
60/* Lattice values for const and pure functions. Everything starts out
61 being const, then may drop to pure and then neither depending on
62 what is found. */
enum pure_const_state_e
{
  IPA_CONST,	/* Best: no observable reads or writes of global memory.  */
  IPA_PURE,	/* May read global memory, but never writes it.  */
  IPA_NEITHER	/* Worst: arbitrary side effects possible.  */
};
69
fc94a528 70const char *pure_const_names[3] = {"const", "pure", "neither"};
71
cb886925 72/* Holder for the const_state. There is one of these per function
73 decl. */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping flag corresponding to state_previously_known, i.e. taken
     from declaration attributes rather than from body analysis.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception visible to callers.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};
96
/* State used when we know nothing about function.
   All fields are pessimistic: IPA_NEITHER, looping, throwing, freeing.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


/* Pointer to a per-function analysis record; records are heap-allocated
   and owned by funct_state_vec below (varying_state excepted).  */
typedef struct funct_state_d * funct_state;
103
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;
cb886925 111
415309e2 112static bool gate_pure_const (void);
113
namespace {

/* Pass-manager metadata for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass computing and propagating const/pure/nothrow flags over the
   callgraph; summaries are kept in funct_state_vec and maintained via
   the cgraph hooks registered by register_hooks.  */
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  /* True once register_hooks has run; guards against re-registration.  */
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
151
2c06958d 152/* Try to guess if function body will always be visible to compiler
153 when compiling the call and whether compiler will be able
154 to propagate the information by itself. */
155
156static bool
157function_always_visible_to_compiler_p (tree decl)
158{
159 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
160}
161
162/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
163 is true if the function is known to be finite. The diagnostic is
431205b7 164 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
2c06958d 165 OPTION, this function may initialize it and it is always returned
166 by the function. */
167
431205b7 168static hash_set<tree> *
2c06958d 169suggest_attribute (int option, tree decl, bool known_finite,
431205b7 170 hash_set<tree> *warned_about,
2c06958d 171 const char * attrib_name)
172{
2c5d2e39 173 if (!option_enabled (option, &global_options))
2c06958d 174 return warned_about;
175 if (TREE_THIS_VOLATILE (decl)
176 || (known_finite && function_always_visible_to_compiler_p (decl)))
177 return warned_about;
178
179 if (!warned_about)
431205b7 180 warned_about = new hash_set<tree>;
181 if (warned_about->contains (decl))
2c06958d 182 return warned_about;
431205b7 183 warned_about->add (decl);
2c06958d 184 warning_at (DECL_SOURCE_LOCATION (decl),
185 option,
186 known_finite
187 ? _("function might be candidate for attribute %<%s%>")
188 : _("function might be candidate for attribute %<%s%>"
189 " if it is known to return normally"), attrib_name);
190 return warned_about;
191}
192
193/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. */
195
196static void
197warn_function_pure (tree decl, bool known_finite)
198{
431205b7 199 static hash_set<tree> *warned_about;
2c06958d 200
201 warned_about
202 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
203 known_finite, warned_about, "pure");
204}
205
206/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
207 is true if the function is known to be finite. */
208
209static void
210warn_function_const (tree decl, bool known_finite)
211{
431205b7 212 static hash_set<tree> *warned_about;
2c06958d 213 warned_about
214 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
215 known_finite, warned_about, "const");
216}
43d60d64 217
64641360 218static void
43d60d64 219warn_function_noreturn (tree decl)
220{
431205b7 221 static hash_set<tree> *warned_about;
08c6cbd2 222 if (!lang_hooks.missing_noreturn_ok_p (decl)
223 && targetm.warn_func_return (decl))
43d60d64 224 warned_about
225 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
226 true, warned_about, "noreturn");
227}
9631926a 228
1ee39ad1 229/* Return true if we have a function state for NODE. */
230
231static inline bool
232has_function_state (struct cgraph_node *node)
233{
f1f41a6c 234 if (!funct_state_vec.exists ()
235 || funct_state_vec.length () <= (unsigned int)node->uid)
1ee39ad1 236 return false;
f1f41a6c 237 return funct_state_vec[node->uid] != NULL;
1ee39ad1 238}
239
48e1416a 240/* Return the function state from NODE. */
f7d118a9 241
242static inline funct_state
243get_function_state (struct cgraph_node *node)
244{
f1f41a6c 245 if (!funct_state_vec.exists ()
246 || funct_state_vec.length () <= (unsigned int)node->uid
247 || !funct_state_vec[node->uid])
1ee39ad1 248 /* We might want to put correct previously_known state into varying. */
db86e424 249 return &varying_state;
f1f41a6c 250 return funct_state_vec[node->uid];
cb886925 251}
252
253/* Set the function state S for NODE. */
254
255static inline void
256set_function_state (struct cgraph_node *node, funct_state s)
257{
f1f41a6c 258 if (!funct_state_vec.exists ()
259 || funct_state_vec.length () <= (unsigned int)node->uid)
260 funct_state_vec.safe_grow_cleared (node->uid + 1);
261 funct_state_vec[node->uid] = s;
f7d118a9 262}
263
f0b5f617 264/* Check to see if the use (or definition when CHECKING_WRITE is true)
f7d118a9 265 variable T is legal in a function that is either pure or const. */
266
48e1416a 267static inline void
268check_decl (funct_state local,
023a28e1 269 tree t, bool checking_write, bool ipa)
f7d118a9 270{
f7d118a9 271 /* Do not want to do anything with volatile except mark any
272 function that uses one to be not const or pure. */
48e1416a 273 if (TREE_THIS_VOLATILE (t))
274 {
f7d118a9 275 local->pure_const_state = IPA_NEITHER;
b5cebd44 276 if (dump_file)
277 fprintf (dump_file, " Volatile operand is not const/pure");
f7d118a9 278 return;
279 }
280
281 /* Do not care about a local automatic that is not static. */
282 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
283 return;
284
b5cebd44 285 /* If the variable has the "used" attribute, treat it as if it had a
286 been touched by the devil. */
83a23b05 287 if (DECL_PRESERVE_P (t))
b5cebd44 288 {
289 local->pure_const_state = IPA_NEITHER;
290 if (dump_file)
291 fprintf (dump_file, " Used static/global variable is not const/pure\n");
292 return;
293 }
294
023a28e1 295 /* In IPA mode we are not interested in checking actual loads and stores;
296 they will be processed at propagation time using ipa_ref. */
297 if (ipa)
298 return;
299
f7d118a9 300 /* Since we have dealt with the locals and params cases above, if we
301 are CHECKING_WRITE, this cannot be a pure or constant
302 function. */
48e1416a 303 if (checking_write)
bc17fd99 304 {
305 local->pure_const_state = IPA_NEITHER;
b5cebd44 306 if (dump_file)
307 fprintf (dump_file, " static/global memory write is not const/pure\n");
bc17fd99 308 return;
309 }
f7d118a9 310
311 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
312 {
b5cebd44 313 /* Readonly reads are safe. */
314 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
315 return; /* Read of a constant, do not change the function state. */
48e1416a 316 else
f7d118a9 317 {
b5cebd44 318 if (dump_file)
319 fprintf (dump_file, " global memory read is not const\n");
f7d118a9 320 /* Just a regular read. */
321 if (local->pure_const_state == IPA_CONST)
322 local->pure_const_state = IPA_PURE;
323 }
324 }
b5cebd44 325 else
326 {
327 /* Compilation level statics can be read if they are readonly
328 variables. */
329 if (TREE_READONLY (t))
330 return;
331
332 if (dump_file)
333 fprintf (dump_file, " static memory read is not const\n");
334 /* Just a regular read. */
335 if (local->pure_const_state == IPA_CONST)
336 local->pure_const_state = IPA_PURE;
337 }
f7d118a9 338}
339
f7d118a9 340
/* Check to see if the memory reference operand T (a use, or a definition
   when CHECKING_WRITE is true) is legal in a function that is either pure
   or const, updating LOCAL accordingly.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* Accesses through a pointer proven not to alias global memory
	 touch only local state and are harmless.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      /* A read that may reach global memory demotes const to pure.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
379
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN is true when control never reaches a return,
   making side effects unobservable.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      /* Side effects that can never be observed at a return statement
	 let us treat the function as pure, but it must be considered
	 looping so DCE does not remove the call.  */
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
420
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      /* The looping flag stored alongside IPA_NEITHER is meaningless
	 (see comment above), so take the incoming flag outright;
	 otherwise keep the more optimistic of the two.  */
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
441
442/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
443 into STATE and LOOPING worse of the two variants. */
444
445static inline void
446worse_state (enum pure_const_state_e *state, bool *looping,
447 enum pure_const_state_e state2, bool looping2)
448{
449 *state = MAX (*state, state2);
450 *looping = MAX (*looping, looping2);
451}
452
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  Store the resulting lattice value for CALLEE
   into STATE/LOOPING and return true when such a case applies.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins only manipulate the local frame, stack or EH
	 machinery, so the enclosing function can still be const.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
	*looping = false;
	*state = IPA_CONST;
	return true;
      /* Prefetch is const but must not be removed by DCE, hence the
	 looping flag.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
488
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  Results accumulate into LOCAL; IPA is
   true during whole-program summary generation.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
  				     && stmt_can_throw_external (call));

  /* Record throwing behavior of the call's operands themselves (the
     call's own throwing behavior is handled via flags below).  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* A normal builtin not known to be non-freeing may release
	 memory (free, munmap, ...).  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      /* A call that can neither throw nor return cannot have observable
	 side effects; state_from_flags then treats it as pure/looping.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
624
023a28e1 625/* Wrapper around check_decl for loads in local more. */
5ed0b345 626
627static bool
42acab1c 628check_load (gimple *, tree op, tree, void *data)
5ed0b345 629{
630 if (DECL_P (op))
023a28e1 631 check_decl ((funct_state)data, op, false, false);
5ed0b345 632 else
633 check_op ((funct_state)data, op, false);
634 return false;
635}
636
023a28e1 637/* Wrapper around check_decl for stores in local more. */
5ed0b345 638
639static bool
42acab1c 640check_store (gimple *, tree op, tree, void *data)
5ed0b345 641{
642 if (DECL_P (op))
023a28e1 643 check_decl ((funct_state)data, op, true, false);
644 else
645 check_op ((funct_state)data, op, true);
646 return false;
647}
648
649/* Wrapper around check_decl for loads in ipa mode. */
650
651static bool
42acab1c 652check_ipa_load (gimple *, tree op, tree, void *data)
023a28e1 653{
654 if (DECL_P (op))
655 check_decl ((funct_state)data, op, false, true);
656 else
657 check_op ((funct_state)data, op, false);
658 return false;
659}
660
661/* Wrapper around check_decl for stores in ipa mode. */
662
663static bool
42acab1c 664check_ipa_store (gimple *, tree op, tree, void *data)
023a28e1 665{
666 if (DECL_P (op))
667 check_decl ((funct_state)data, op, true, true);
5ed0b345 668 else
669 check_op ((funct_state)data, op, true);
670 return false;
671}
672
/* Look at the statement pointed to by GSIP and record in LOCAL whatever
   interesting side effects it has.  IPA is true during whole-program
   summary generation.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug statements never affect generated code.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Any volatile statement other than a clobber pins the function to
     IPA_NEITHER.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Throwing behavior of calls is handled in check_call; handle all
     other potentially-throwing statements here.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
768
cb886925 769
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Returns a freshly allocated
   funct_state summary; IPA is true during whole-program summary
   generation.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the most optimistic values; statement scanning below can
     only make them worse.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Once every recorded property is already at its worst value,
	     further scanning cannot change anything.  */
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      /* Use scalar evolutions to try to prove every loop
		 finite; a single unproven loop makes the function
		 looping.  */
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  /* Combine the scanned result with what the declaration already
     guarantees, keeping the better of the two.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}
894
/* Called when new function is inserted to callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->get_availability () < AVAIL_INTERPOSABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  /* NOTE(review): the guard above admits AVAIL_INTERPOSABLE nodes but
     the condition below requires strictly better availability, so
     interposable nodes are skipped here even though
     pure_const_generate_summary analyzes them (>=) -- confirm this
     asymmetry is intended.  */
  if (node->get_availability () > AVAIL_INTERPOSABLE
      && opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
909
86844d6c 910/* Called when new clone is inserted to callgraph late. */
911
912static void
913duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
914 void *data ATTRIBUTE_UNUSED)
915{
1ee39ad1 916 if (has_function_state (src))
86844d6c 917 {
918 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 919 gcc_assert (!has_function_state (dst));
86844d6c 920 memcpy (l, get_function_state (src), sizeof (*l));
921 set_function_state (dst, l);
922 }
923}
924
925/* Called when new clone is inserted to callgraph late. */
926
927static void
928remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
929{
1ee39ad1 930 if (has_function_state (node))
86844d6c 931 {
db86e424 932 funct_state l = get_function_state (node);
933 if (l != &varying_state)
934 free (l);
86844d6c 935 set_function_state (node, NULL);
936 }
937}
938
f7d118a9 939\f
/* Register the cgraph hooks that keep the per-function summaries in
   sync with callgraph mutations (insertion, cloning, removal).
   Idempotent: only the first call has any effect.  */
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
    symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
    symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
956
957
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  /* Make sure the summary-maintenance hooks are in place before any
     summaries are created.  */
  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_availability () >= AVAIL_INTERPOSABLE
	&& opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
980
7bfefa9d 981
/* Serialize the ipa info for lto.  Writes a count followed by, for each
   defined function with a summary, its encoder reference and a bitpack
   of the funct_state flags.  The bitpack field order here must be the
   exact reverse-compatible mirror of pure_const_read_summary.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many
     to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1039
1040
1041/* Deserialize the ipa info for lto. */
1042
48e1416a 1043static void
7bfefa9d 1044pure_const_read_summary (void)
1045{
1046 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1047 struct lto_file_decl_data *file_data;
1048 unsigned int j = 0;
1049
415309e2 1050 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1051 pass->register_hooks ();
1052
7bfefa9d 1053 while ((file_data = file_data_vec[j++]))
1054 {
1055 const char *data;
1056 size_t len;
1057 struct lto_input_block *ib
48e1416a 1058 = lto_create_simple_input_block (file_data,
1059 LTO_section_ipa_pure_const,
7bfefa9d 1060 &data, &len);
1061 if (ib)
1062 {
1063 unsigned int i;
7f385784 1064 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1065
1066 for (i = 0; i < count; i++)
1067 {
1068 unsigned int index;
1069 struct cgraph_node *node;
30baba90 1070 struct bitpack_d bp;
7bfefa9d 1071 funct_state fs;
70225339 1072 lto_symtab_encoder_t encoder;
7bfefa9d 1073
1074 fs = XCNEW (struct funct_state_d);
7f385784 1075 index = streamer_read_uhwi (ib);
70225339 1076 encoder = file_data->symtab_node_encoder;
415d1b9a 1077 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1078 index));
7bfefa9d 1079 set_function_state (node, fs);
1080
1081 /* Note that the flags must be read in the opposite
1082 order in which they were written (the bitflags were
1083 pushed into FLAGS). */
7f385784 1084 bp = streamer_read_bitpack (ib);
7bfefa9d 1085 fs->pure_const_state
30baba90 1086 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1087 fs->state_previously_known
30baba90 1088 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1089 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1090 fs->looping = bp_unpack_value (&bp, 1);
1091 fs->can_throw = bp_unpack_value (&bp, 1);
04c849b3 1092 fs->can_free = bp_unpack_value (&bp, 1);
fc94a528 1093 if (dump_file)
1094 {
02774f2d 1095 int flags = flags_from_decl_or_type (node->decl);
fc94a528 1096 fprintf (dump_file, "Read info for %s/%i ",
f1c8b4d7 1097 node->name (),
02774f2d 1098 node->order);
fc94a528 1099 if (flags & ECF_CONST)
1100 fprintf (dump_file, " const");
1101 if (flags & ECF_PURE)
1102 fprintf (dump_file, " pure");
1103 if (flags & ECF_NOTHROW)
1104 fprintf (dump_file, " nothrow");
1105 fprintf (dump_file, "\n pure const state: %s\n",
1106 pure_const_names[fs->pure_const_state]);
1107 fprintf (dump_file, " previously known state: %s\n",
1108 pure_const_names[fs->looping_previously_known]);
1109 if (fs->looping)
1110 fprintf (dump_file," function is locally looping\n");
1111 if (fs->looping_previously_known)
1112 fprintf (dump_file," function is previously known looping\n");
1113 if (fs->can_throw)
1114 fprintf (dump_file," function is locally throwing\n");
04c849b3 1115 if (fs->can_free)
1116 fprintf (dump_file," function can locally free\n");
fc94a528 1117 }
7bfefa9d 1118 }
1119
48e1416a 1120 lto_destroy_simple_input_block (file_data,
1121 LTO_section_ipa_pure_const,
7bfefa9d 1122 ib, data, len);
1123 }
1124 }
1125}
1126
355c1f40 1127/* We only propagate across edges that can throw externally and their callee
1128 is not interposable. */
7bfefa9d 1129
17b28e52 1130static bool
355c1f40 1131ignore_edge_for_nothrow (struct cgraph_edge *e)
17b28e52 1132{
355c1f40 1133 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1134 return true;
1135
1136 enum availability avail;
1137 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail);
1138 return (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl));
17b28e52 1139}
1140
b2c2e188 1141/* Return true if NODE is self recursive function.
e9de52cc 1142 Indirectly recursive functions appears as non-trivial strongly
1143 connected components, so we need to care about self recursion
1144 only. */
a1e88032 1145
1146static bool
1147self_recursive_p (struct cgraph_node *node)
1148{
1149 struct cgraph_edge *e;
1150 for (e = node->callees; e; e = e->next_callee)
415d1b9a 1151 if (e->callee->function_symbol () == node)
a1e88032 1152 return true;
1153 return false;
1154}
1155
366970c6 1156/* Return true if N is cdtor that is not const or pure. In this case we may
1157 need to remove unreachable function if it is marked const/pure. */
1158
1159static bool
1160cdtor_p (cgraph_node *n, void *)
1161{
1162 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1163 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1164 return false;
1165}
1166
d2c6c2b4 1167/* We only propagate across edges with non-interposable callee. */
1168
1169static bool
1170ignore_edge_for_pure_const (struct cgraph_edge *e)
1171{
1172 enum availability avail;
1173 e->callee->function_or_virtual_thunk_symbol (&avail);
1174 return (avail <= AVAIL_INTERPOSABLE);
1175}
1176
1177
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Works on the SCC-reduced postorder: every node in a cycle
   receives the same (worst) state.  Returns true if some const/pure
   cdtor became removable, so the caller must run function removal.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  /* Collapse cycles: edges to interposable callees are ignored here and
     handled conservatively in the merge loop below.  */
  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For interposable nodes we can not assume anything.  */
	  if (w->get_availability () == AVAIL_INTERPOSABLE)
	    {
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Interposable. state %s looping %i\n",
			   pure_const_names[w_l->state_previously_known],
			   w_l->looping_previously_known);
		}
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Call to %s/%i",
			   e->callee->name (),
			   e->callee->order);
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* Calls that cannot lead to return (e.g. ending in a
		     noreturn path) can have their side effects ignored,
		     but must be treated as possibly looping.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					       y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  /* Indirect calls are conservatively assumed to free.  */
	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* Never propagate to a state worse than what was previously
	     known (e.g. from declared attributes).  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* A const cdtor may now be removable.  */
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_const_flag (true, this_looping);
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_pure_flag (true, this_looping);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1498
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  Like propagate_pure_const, works on the SCC-reduced
   postorder; a whole cycle is nothrow only if no member can throw.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				    function_or_virtual_thunk_symbol (&avail);

		  /* We can use info about the callee only if we know it can
		     not be interposed.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && get_function_state (y)->can_throw))
		    can_throw = true;
		}
	      /* Indirect calls throw unless flagged ECF_NOTHROW.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    /* Record the propagated result for later consumers
	       (e.g. propagate_pure_const).  */
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1603
1604
1605/* Produce the global information by preforming a transitive closure
1606 on the local information that was produced by generate_summary. */
1607
415309e2 1608unsigned int
1609pass_ipa_pure_const::
1610execute (function *)
c0240443 1611{
1612 struct cgraph_node *node;
366970c6 1613 bool remove_p;
c0240443 1614
35ee1c66 1615 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1616 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1617 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
c0240443 1618
1619 /* Nothrow makes more function to not lead to return and improve
1620 later analysis. */
1621 propagate_nothrow ();
366970c6 1622 remove_p = propagate_pure_const ();
c0240443 1623
1624 /* Cleanup. */
c6d207aa 1625 FOR_EACH_FUNCTION (node)
c0240443 1626 if (has_function_state (node))
1627 free (get_function_state (node));
f1f41a6c 1628 funct_state_vec.release ();
366970c6 1629 return remove_p ? TODO_remove_functions : 0;
f7d118a9 1630}
1631
1632static bool
1633gate_pure_const (void)
1634{
d1f68cd8 1635 return flag_ipa_pure_const || in_lto_p;
f7d118a9 1636}
1637
/* Construct the IPA pure/const pass.  The ipa_opt_pass_d base is given
   the summary generation and LTO streaming callbacks; hook holders start
   NULL and are filled in lazily by register_hooks (guarded by init_p).  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
cbe8bda8 1655
1656ipa_opt_pass_d *
1657make_pass_ipa_pure_const (gcc::context *ctxt)
1658{
1659 return new pass_ipa_pure_const (ctxt);
1660}
1661
2c06958d 1662/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 1663
2c06958d 1664static bool
1665skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 1666{
b5cebd44 1667 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1668 we must not promote functions that are called by already processed functions. */
1669
1670 if (function_called_by_processed_nodes_p ())
1671 {
1672 if (dump_file)
1673 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 1674 return true;
b5cebd44 1675 }
415d1b9a 1676 if (node->get_availability () <= AVAIL_INTERPOSABLE)
b5cebd44 1677 {
1678 if (dump_file)
355c1f40 1679 fprintf (dump_file, "Function is not available or interposable; not analyzing.\n");
2c06958d 1680 return true;
b5cebd44 1681 }
2c06958d 1682 return false;
1683}
1684
1685/* Simple local pass for pure const discovery reusing the analysis from
1686 ipa_pure_const. This pass is effective when executed together with
1687 other optimization passes in early optimization pass queue. */
b5cebd44 1688
7620bc82 1689namespace {
1690
/* Pass metadata for the per-function (GIMPLE) pure/const discovery
   pass; scheduled in the early optimization queue.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1703
/* Per-function pass wrapper around the pure/const analysis; clonable so
   it can appear more than once in the pass pipeline.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
1717
/* Run the local pure/const analysis on FUN.  Also performs NORETURN
   discovery.  Even when the function must be skipped for promotion
   (see skip_function_for_local_pure_const) the analysis still runs if
   -Wsuggest-attribute=const/pure warnings were requested.  Returns
   TODO flags from execute_fixup_cfg when any flag changed.  */
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  /* Warn even when skipping; only set the flag when safe.  */
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      node->set_const_flag (true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const; maybe we can drop the "looping" qualifier.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_const_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  /* Nothrow is set unconditionally (no skip check): it does not
     require the fixup-cfg caveat that const/pure promotion does.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1827
7620bc82 1828} // anon namespace
1829
cbe8bda8 1830gimple_opt_pass *
1831make_pass_local_pure_const (gcc::context *ctxt)
1832{
1833 return new pass_local_pure_const (ctxt);
1834}
64641360 1835
1836/* Emit noreturn warnings. */
1837
7620bc82 1838namespace {
1839
/* Pass metadata for the noreturn-suggestion warning pass; requires a
   CFG so the exit block's predecessors can be inspected.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1852
/* Warn about functions that never return to their caller but lack the
   noreturn attribute.  A function qualifies when the exit block is
   unreachable (no predecessors) and the decl is not already marked
   TREE_THIS_VOLATILE (the noreturn marker on FUNCTION_DECLs).  */
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
1871
7620bc82 1872} // anon namespace
1873
64641360 1874gimple_opt_pass *
1875make_pass_warn_function_noreturn (gcc::context *ctxt)
1876{
1877 return new pass_warn_function_noreturn (ctxt);
1878}
73594827 1879
1880/* Simple local pass for pure const discovery reusing the analysis from
1881 ipa_pure_const. This pass is effective when executed together with
1882 other optimization passes in early optimization pass queue. */
1883
7620bc82 1884namespace {
1885
/* Pass metadata for the early local nothrow-discovery pass.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1898
/* Local pass discovering functions whose bodies contain no statement
   that can throw externally, marking them TREE_NOTHROW.  Clonable so
   it can be scheduled multiple times; gated simply on `optimize'.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
1912
1913unsigned int
1914pass_nothrow::execute (function *)
1915{
1916 struct cgraph_node *node;
1917 basic_block this_block;
1918
1919 if (TREE_NOTHROW (current_function_decl))
1920 return 0;
1921
1922 node = cgraph_node::get (current_function_decl);
1923
1924 /* We run during lowering, we can not really use availability yet. */
1925 if (cgraph_node::get (current_function_decl)->get_availability ()
1926 <= AVAIL_INTERPOSABLE)
1927 {
1928 if (dump_file)
1929 fprintf (dump_file, "Function is interposable;"
1930 " not analyzing.\n");
1931 return true;
1932 }
1933
1934 FOR_EACH_BB_FN (this_block, cfun)
1935 {
1936 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1937 !gsi_end_p (gsi);
1938 gsi_next (&gsi))
1939 if (stmt_can_throw_external (gsi_stmt (gsi)))
1940 {
1941 if (is_gimple_call (gsi_stmt (gsi)))
1942 {
1943 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1944 if (callee_t && recursive_call_p (current_function_decl,
1945 callee_t))
1946 continue;
1947 }
1948
1949 if (dump_file)
1950 {
1951 fprintf (dump_file, "Statement can throw: ");
1952 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1953 }
1954 return 0;
1955 }
1956 }
1957
1958 node->set_nothrow_flag (true);
1959 if (dump_file)
1960 fprintf (dump_file, "Function found to be nothrow: %s\n",
1961 current_function_name ());
1962 return 0;
1963}
1964
7620bc82 1965} // anon namespace
1966
73594827 1967gimple_opt_pass *
1968make_pass_nothrow (gcc::context *ctxt)
1969{
1970 return new pass_nothrow (ctxt);
1971}