]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-pure-const.c
Automated conversion of passes to C++ classes
[thirdparty/gcc.git] / gcc / ipa-pure-const.c
CommitLineData
f7d118a9 1/* Callgraph based analysis of static variables.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "tm.h"
38#include "tree.h"
39#include "tree-flow.h"
40#include "tree-inline.h"
41#include "tree-pass.h"
42#include "langhooks.h"
43#include "pointer-set.h"
44#include "ggc.h"
45#include "ipa-utils.h"
75a70cf9 46#include "gimple.h"
f7d118a9 47#include "cgraph.h"
f7d118a9 48#include "flags.h"
f7d118a9 49#include "diagnostic.h"
ce084dfc 50#include "gimple-pretty-print.h"
f7d118a9 51#include "langhooks.h"
959fce68 52#include "target.h"
7bfefa9d 53#include "lto-streamer.h"
2541503d 54#include "data-streamer.h"
55#include "tree-streamer.h"
c9263b6a 56#include "cfgloop.h"
57#include "tree-scalar-evolution.h"
2c06958d 58#include "intl.h"
59#include "opts.h"
f7d118a9 60
/* Set of nodes already visited during local analysis.  Shared between
   summary generation and the function-insertion hook so that static
   initializers are only scanned once.  */
static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The numeric order matters: MIN/MAX are used to merge
   states in better_state/worse_state below.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names for the lattice, indexed by pure_const_state_e.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

/* Holders of ipa cgraph hooks, kept so the pass can unregister them.  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
2c06958d 116/* Try to guess if function body will always be visible to compiler
117 when compiling the call and whether compiler will be able
118 to propagate the information by itself. */
119
120static bool
121function_always_visible_to_compiler_p (tree decl)
122{
123 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
124}
125
126/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
127 is true if the function is known to be finite. The diagnostic is
128 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
129 OPTION, this function may initialize it and it is always returned
130 by the function. */
131
132static struct pointer_set_t *
133suggest_attribute (int option, tree decl, bool known_finite,
134 struct pointer_set_t *warned_about,
135 const char * attrib_name)
136{
2c5d2e39 137 if (!option_enabled (option, &global_options))
2c06958d 138 return warned_about;
139 if (TREE_THIS_VOLATILE (decl)
140 || (known_finite && function_always_visible_to_compiler_p (decl)))
141 return warned_about;
142
143 if (!warned_about)
144 warned_about = pointer_set_create ();
145 if (pointer_set_contains (warned_about, decl))
146 return warned_about;
147 pointer_set_insert (warned_about, decl);
148 warning_at (DECL_SOURCE_LOCATION (decl),
149 option,
150 known_finite
151 ? _("function might be candidate for attribute %<%s%>")
152 : _("function might be candidate for attribute %<%s%>"
153 " if it is known to return normally"), attrib_name);
154 return warned_about;
155}
156
157/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
158 is true if the function is known to be finite. */
159
160static void
161warn_function_pure (tree decl, bool known_finite)
162{
163 static struct pointer_set_t *warned_about;
164
165 warned_about
166 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
167 known_finite, warned_about, "pure");
168}
169
170/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
171 is true if the function is known to be finite. */
172
173static void
174warn_function_const (tree decl, bool known_finite)
175{
176 static struct pointer_set_t *warned_about;
177 warned_about
178 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
179 known_finite, warned_about, "const");
180}
43d60d64 181
182void
183warn_function_noreturn (tree decl)
184{
185 static struct pointer_set_t *warned_about;
08c6cbd2 186 if (!lang_hooks.missing_noreturn_ok_p (decl)
187 && targetm.warn_func_return (decl))
43d60d64 188 warned_about
189 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
190 true, warned_about, "noreturn");
191}
9631926a 192
cb886925 193/* Init the function state. */
194
195static void
196finish_state (void)
197{
f1f41a6c 198 funct_state_vec.release ();
cb886925 199}
200
201
1ee39ad1 202/* Return true if we have a function state for NODE. */
203
204static inline bool
205has_function_state (struct cgraph_node *node)
206{
f1f41a6c 207 if (!funct_state_vec.exists ()
208 || funct_state_vec.length () <= (unsigned int)node->uid)
1ee39ad1 209 return false;
f1f41a6c 210 return funct_state_vec[node->uid] != NULL;
1ee39ad1 211}
212
48e1416a 213/* Return the function state from NODE. */
f7d118a9 214
215static inline funct_state
216get_function_state (struct cgraph_node *node)
217{
f1f41a6c 218 if (!funct_state_vec.exists ()
219 || funct_state_vec.length () <= (unsigned int)node->uid
220 || !funct_state_vec[node->uid])
1ee39ad1 221 /* We might want to put correct previously_known state into varying. */
db86e424 222 return &varying_state;
f1f41a6c 223 return funct_state_vec[node->uid];
cb886925 224}
225
226/* Set the function state S for NODE. */
227
228static inline void
229set_function_state (struct cgraph_node *node, funct_state s)
230{
f1f41a6c 231 if (!funct_state_vec.exists ()
232 || funct_state_vec.length () <= (unsigned int)node->uid)
233 funct_state_vec.safe_grow_cleared (node->uid + 1);
234 funct_state_vec[node->uid] = s;
f7d118a9 235}
236
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of the variable T is legal in a function that is either pure or
   const.  LOCAL is the state being accumulated for the current
   function; it is only ever demoted here (const -> pure -> neither),
   never improved.  IPA is true during whole-program analysis, where
   loads/stores are handled later via ipa_ref.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, " global memory read is not const\n");
	  /* Just a regular read: demote const to pure, leave pure and
	     neither unchanged.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read: demote const to pure.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
312
f7d118a9 313
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of the memory reference T is legal in a function that is either pure
   or const.  Unlike check_decl this handles indirect references: the
   base address is inspected, and dereferences of SSA pointers that
   provably do not alias global memory are harmless.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* Access through a pointer known to point to local (non-escaped)
	 memory cannot affect the const/pure lattice.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      /* A read of possibly-global memory demotes const to pure.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
352
023a28e1 353/* compute state based on ECF FLAGS and store to STATE and LOOPING. */
354
355static void
356state_from_flags (enum pure_const_state_e *state, bool *looping,
357 int flags, bool cannot_lead_to_return)
358{
359 *looping = false;
360 if (flags & ECF_LOOPING_CONST_OR_PURE)
361 {
362 *looping = true;
363 if (dump_file && (dump_flags & TDF_DETAILS))
364 fprintf (dump_file, " looping");
365 }
366 if (flags & ECF_CONST)
367 {
368 *state = IPA_CONST;
369 if (dump_file && (dump_flags & TDF_DETAILS))
370 fprintf (dump_file, " const\n");
371 }
372 else if (flags & ECF_PURE)
373 {
374 *state = IPA_PURE;
375 if (dump_file && (dump_flags & TDF_DETAILS))
376 fprintf (dump_file, " pure\n");
377 }
378 else if (cannot_lead_to_return)
379 {
380 *state = IPA_PURE;
381 *looping = true;
382 if (dump_file && (dump_flags & TDF_DETAILS))
383 fprintf (dump_file, " ignoring side effects->pure looping\n");
384 }
385 else
386 {
387 if (dump_file && (dump_flags & TDF_DETAILS))
9631926a 388 fprintf (dump_file, " neither\n");
023a28e1 389 *state = IPA_NEITHER;
390 *looping = true;
391 }
392}
393
394/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
395 into STATE and LOOPING better of the two variants.
396 Be sure to merge looping correctly. IPA_NEITHER functions
397 have looping 0 even if they don't have to return. */
398
399static inline void
400better_state (enum pure_const_state_e *state, bool *looping,
401 enum pure_const_state_e state2, bool looping2)
402{
403 if (state2 < *state)
404 {
405 if (*state == IPA_NEITHER)
406 *looping = looping2;
407 else
408 *looping = MIN (*looping, looping2);
409 }
410 else if (state2 != IPA_NEITHER)
411 *looping = MIN (*looping, looping2);
412}
413
414/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
415 into STATE and LOOPING worse of the two variants. */
416
417static inline void
418worse_state (enum pure_const_state_e *state, bool *looping,
419 enum pure_const_state_e state2, bool looping2)
420{
421 *state = MAX (*state, state2);
422 *looping = MAX (*looping, looping2);
423}
424
0a10fd82 425/* Recognize special cases of builtins that are by themselves not pure or const
7dd42908 426 but function using them is. */
427static bool
a53e7471 428special_builtin_state (enum pure_const_state_e *state, bool *looping,
7dd42908 429 tree callee)
430{
431 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
432 switch (DECL_FUNCTION_CODE (callee))
433 {
434 case BUILT_IN_RETURN:
435 case BUILT_IN_UNREACHABLE:
436 case BUILT_IN_ALLOCA:
581bf1c2 437 case BUILT_IN_ALLOCA_WITH_ALIGN:
7dd42908 438 case BUILT_IN_STACK_SAVE:
439 case BUILT_IN_STACK_RESTORE:
440 case BUILT_IN_EH_POINTER:
441 case BUILT_IN_EH_FILTER:
442 case BUILT_IN_UNWIND_RESUME:
443 case BUILT_IN_CXA_END_CLEANUP:
444 case BUILT_IN_EH_COPY_VALUES:
445 case BUILT_IN_FRAME_ADDRESS:
446 case BUILT_IN_APPLY:
447 case BUILT_IN_APPLY_ARGS:
7dd42908 448 *looping = false;
449 *state = IPA_CONST;
450 return true;
451 case BUILT_IN_PREFETCH:
452 *looping = true;
453 *state = IPA_CONST;
454 return true;
455 }
456 return false;
457}
458
/* Check the effects of the GIMPLE call statement CALL on the state
   LOCAL of the current function.  This covers references in the
   arguments, indirect calls, calls outside the compilation unit, and
   special attributes/builtins that affect purity.  IPA is true during
   whole-program analysis, where direct calls are left to the
   propagation phase.  */

static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  /* Operands of the call may themselves throw (e.g. with
     -fnon-call-exceptions); record looping/can_throw accordingly.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t == current_function_decl)
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      /* Side effects of a call that cannot return (noreturn+nothrow, or
	 noreturn without enabled exceptions) can be ignored.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
587
023a28e1 588/* Wrapper around check_decl for loads in local more. */
5ed0b345 589
590static bool
591check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
592{
593 if (DECL_P (op))
023a28e1 594 check_decl ((funct_state)data, op, false, false);
5ed0b345 595 else
596 check_op ((funct_state)data, op, false);
597 return false;
598}
599
023a28e1 600/* Wrapper around check_decl for stores in local more. */
5ed0b345 601
602static bool
603check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
604{
605 if (DECL_P (op))
023a28e1 606 check_decl ((funct_state)data, op, true, false);
607 else
608 check_op ((funct_state)data, op, true);
609 return false;
610}
611
612/* Wrapper around check_decl for loads in ipa mode. */
613
614static bool
615check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
616{
617 if (DECL_P (op))
618 check_decl ((funct_state)data, op, false, true);
619 else
620 check_op ((funct_state)data, op, false);
621 return false;
622}
623
624/* Wrapper around check_decl for stores in ipa mode. */
625
626static bool
627check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
628{
629 if (DECL_P (op))
630 check_decl ((funct_state)data, op, true, true);
5ed0b345 631 else
632 check_op ((funct_state)data, op, true);
633 return false;
634}
635
/* Look into the statement pointed to by GSIP and figure out what
   interesting side effects it has, accumulating them into LOCAL.
   IPA selects whether load/store checking is deferred to the
   propagation phase (see check_ipa_load/check_ipa_store).  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  /* Debug statements have no effect on generated code.  */
  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Volatile accesses kill const/pure; clobbers are only lifetime
     markers and are exempt.  */
  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Calls handle their own throwing in check_call below.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	}
      if (gimple_asm_volatile_p (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	}
      return;
    default:
      break;
    }
}
719
cb886925 720
/* This is the main routine for finding the reference patterns for
   global variables within the function FN.  Returns a freshly
   allocated funct_state describing FN's local const/pure/looping/throw
   properties; the caller owns the result.  IPA is true when the result
   will be refined later by interprocedural propagation (required for
   thunks and aliases, which are returned unanalyzed).  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->symbol.decl;
  funct_state l;
  basic_block this_block;

  /* Start optimistic (const, non-looping) and let the statement walk
     demote the lattice.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->symbol.decl),
		    cgraph_node_cannot_return (fn));

  if (fn->thunk.thunk_p || fn->symbol.alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       cgraph_node_name (fn));
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof(wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  /* Once every property has hit bottom there is nothing more
	     to learn; stop scanning.  */
	  if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      loop_iterator li;
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (li, loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping =true;
		    FOR_EACH_LOOP_BREAK (li);
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  /* Let declared attributes (const/pure/noreturn) improve on what the
     body scan found.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}
838
/* Called when new function is inserted to callgraph late.  Analyze it
   immediately so its state is available for later propagation.  Note
   the asymmetric availability checks: nodes below AVAIL_OVERWRITABLE
   are ignored entirely, and exactly-overwritable nodes are visited but
   not analyzed (their body cannot be trusted).  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
    set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
855
86844d6c 856/* Called when new clone is inserted to callgraph late. */
857
858static void
859duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
860 void *data ATTRIBUTE_UNUSED)
861{
1ee39ad1 862 if (has_function_state (src))
86844d6c 863 {
864 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 865 gcc_assert (!has_function_state (dst));
86844d6c 866 memcpy (l, get_function_state (src), sizeof (*l));
867 set_function_state (dst, l);
868 }
869}
870
871/* Called when new clone is inserted to callgraph late. */
872
873static void
874remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
875{
1ee39ad1 876 if (has_function_state (node))
86844d6c 877 {
db86e424 878 funct_state l = get_function_state (node);
879 if (l != &varying_state)
880 free (l);
86844d6c 881 set_function_state (node, NULL);
882 }
883}
884
f7d118a9 885\f
7bfefa9d 886static void
887register_hooks (void)
f7d118a9 888{
7bfefa9d 889 static bool init_p = false;
890
891 if (init_p)
892 return;
893
894 init_p = true;
f7d118a9 895
86844d6c 896 node_removal_hook_holder =
897 cgraph_add_node_removal_hook (&remove_node_data, NULL);
898 node_duplication_hook_holder =
899 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
50828ed8 900 function_insertion_hook_holder =
901 cgraph_add_function_insertion_hook (&add_new_function, NULL);
7bfefa9d 902}
903
904
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  This is the generate_summary stage of the IPA pass.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
934
7bfefa9d 935
/* Serialize the ipa info for lto.  Writes a count followed by one
   (node reference, bit-packed state) record per analyzed function in
   the current partition.  Must stay in sync with the reader
   (pure_const_read_summary), which unpacks the bits in reverse
   order.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->symbol.definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->symbol.definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node)node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
992
993
994/* Deserialize the ipa info for lto. */
995
48e1416a 996static void
7bfefa9d 997pure_const_read_summary (void)
998{
999 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1000 struct lto_file_decl_data *file_data;
1001 unsigned int j = 0;
1002
1003 register_hooks ();
1004 while ((file_data = file_data_vec[j++]))
1005 {
1006 const char *data;
1007 size_t len;
1008 struct lto_input_block *ib
48e1416a 1009 = lto_create_simple_input_block (file_data,
1010 LTO_section_ipa_pure_const,
7bfefa9d 1011 &data, &len);
1012 if (ib)
1013 {
1014 unsigned int i;
7f385784 1015 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1016
1017 for (i = 0; i < count; i++)
1018 {
1019 unsigned int index;
1020 struct cgraph_node *node;
30baba90 1021 struct bitpack_d bp;
7bfefa9d 1022 funct_state fs;
70225339 1023 lto_symtab_encoder_t encoder;
7bfefa9d 1024
1025 fs = XCNEW (struct funct_state_d);
7f385784 1026 index = streamer_read_uhwi (ib);
70225339 1027 encoder = file_data->symtab_node_encoder;
1028 node = cgraph (lto_symtab_encoder_deref (encoder, index));
7bfefa9d 1029 set_function_state (node, fs);
1030
1031 /* Note that the flags must be read in the opposite
1032 order in which they were written (the bitflags were
1033 pushed into FLAGS). */
7f385784 1034 bp = streamer_read_bitpack (ib);
7bfefa9d 1035 fs->pure_const_state
30baba90 1036 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1037 fs->state_previously_known
30baba90 1038 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1039 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1040 fs->looping = bp_unpack_value (&bp, 1);
1041 fs->can_throw = bp_unpack_value (&bp, 1);
fc94a528 1042 if (dump_file)
1043 {
7d0d0ce1 1044 int flags = flags_from_decl_or_type (node->symbol.decl);
fc94a528 1045 fprintf (dump_file, "Read info for %s/%i ",
1046 cgraph_node_name (node),
15c999e3 1047 node->symbol.order);
fc94a528 1048 if (flags & ECF_CONST)
1049 fprintf (dump_file, " const");
1050 if (flags & ECF_PURE)
1051 fprintf (dump_file, " pure");
1052 if (flags & ECF_NOTHROW)
1053 fprintf (dump_file, " nothrow");
1054 fprintf (dump_file, "\n pure const state: %s\n",
1055 pure_const_names[fs->pure_const_state]);
1056 fprintf (dump_file, " previously known state: %s\n",
1057 pure_const_names[fs->looping_previously_known]);
1058 if (fs->looping)
1059 fprintf (dump_file," function is locally looping\n");
1060 if (fs->looping_previously_known)
1061 fprintf (dump_file," function is previously known looping\n");
1062 if (fs->can_throw)
1063 fprintf (dump_file," function is locally throwing\n");
1064 }
7bfefa9d 1065 }
1066
48e1416a 1067 lto_destroy_simple_input_block (file_data,
1068 LTO_section_ipa_pure_const,
7bfefa9d 1069 ib, data, len);
1070 }
1071 }
1072}
1073
1074
17b28e52 1075static bool
1076ignore_edge (struct cgraph_edge *e)
1077{
1078 return (!e->can_throw_external);
1079}
1080
b2c2e188 1081/* Return true if NODE is self recursive function.
1082 ??? self recursive and indirectly recursive funcions should
a04e8d62 1083 be the same, so this function seems unnecessary. */
a1e88032 1084
1085static bool
1086self_recursive_p (struct cgraph_node *node)
1087{
1088 struct cgraph_edge *e;
1089 for (e = node->callees; e; e = e->next_callee)
b2c2e188 1090 if (cgraph_function_node (e->callee, NULL) == node)
a1e88032 1091 return true;
1092 return false;
1093}
1094
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.

   The callgraph is first reduced to its strongly connected components;
   all nodes of a cycle share one result.  For each component (walked in
   postorder, leaves first) the worst pure/const state of any member,
   callee, indirect call and variable reference is computed, then copied
   back to every member and committed to the decl flags.  */

static void
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Collapse cycles: members of one SCC are chained via aux->next_cycle.  */
  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      /* Start from the best state; merging can only make it worse.  */
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      /* Aliases inherit the state of their target; nothing to do.  */
      if (node->symbol.alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
		     cgraph_node_name (w),
		     w->symbol.order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  /* IPA_NEITHER is the bottom of the lattice; stop early.  */
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For overwritable nodes we can not assume anything.  */
	  if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
	    {
	      /* Only the previously-known (declared) state is usable,
		 since the body may be replaced at link time.  */
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   " Overwritable. state %s looping %i\n",
			   pure_const_names[w_l->state_previously_known],
			   w_l->looping_previously_known);
		}
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   " Call to %s/%i",
			   cgraph_node_name (e->callee),
			   e->callee->symbol.order);
		}
	      if (avail > AVAIL_OVERWRITABLE)
		{
		  /* Analyzed body available: use its computed state.  */
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* Side effects on a path that never returns do not
		     matter for pureness, only for termination.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && cgraph_edge_cannot_lead_to_return (e))
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 " Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      /* Some builtins (e.g. longjmp-like) have hardwired state.  */
	      else if (special_builtin_state (&edge_state, &edge_looping,
					       y->symbol.decl))
		;
	      else
		/* Fall back to the const/pure flags on the declaration.  */
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->symbol.decl),
				  cgraph_edge_cannot_lead_to_return (e));

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, " Indirect call");
	      /* Only the ECF flags recorded on the call are known.  */
	      state_from_flags (&edge_state, &edge_looping,
			        ie->indirect_info->ecf_flags,
			        cgraph_edge_cannot_lead_to_return (ie));
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
	        break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* And finally all loads and stores.  */
	  for (i = 0; ipa_ref_list_reference_iterate (&w->symbol.ref_list, i, ref); i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ipa_ref_varpool_node (ref)->symbol.decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* Stores on never-returning paths are harmless.  */
		  if (ipa_ref_cannot_lead_to_return (ref))
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " global var write\n");
		  break;
		case IPA_REF_ADDR:
		  /* Taking an address has no effect by itself.  */
		  break;
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->symbol.aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  /* A declared attribute may be stronger than what we derived;
	     prefer the better previously-known state.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      this_looping |= w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  switch (this_state)
	    {
	    case IPA_CONST:
	      if (!TREE_READONLY (w->symbol.decl))
		{
		  /* Newly discovered: maybe suggest the attribute.  */
		  warn_function_const (w->symbol.decl, !this_looping);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be %sconst: %s\n",
			     this_looping ? "looping " : "",
			     cgraph_node_name (w));
		}
	      cgraph_set_const_flag (w, true, this_looping);
	      break;

	    case IPA_PURE:
	      if (!DECL_PURE_P (w->symbol.decl))
		{
		  warn_function_pure (w->symbol.decl, !this_looping);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be %spure: %s\n",
			     this_looping ? "looping " : "",
			     cgraph_node_name (w));
		}
	      cgraph_set_pure_flag (w, true, this_looping);
	      break;

	    default:
	      break;
	    }
	  w_info = (struct ipa_dfs_info *) w->symbol.aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1366
1367/* Produce transitive closure over the callgraph and compute nothrow
1368 attributes. */
1369
1370static void
1371propagate_nothrow (void)
1372{
1373 struct cgraph_node *node;
1374 struct cgraph_node *w;
1375 struct cgraph_node **order =
1376 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1377 int order_pos;
1378 int i;
1379 struct ipa_dfs_info * w_info;
1380
7771d558 1381 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
17b28e52 1382 if (dump_file)
1383 {
1384 dump_cgraph (dump_file);
7771d558 1385 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1386 }
c0240443 1387
9d75589a 1388 /* Propagate the local information through the call graph to produce
17b28e52 1389 the global information. All the nodes within a cycle will have
1390 the same info so we collapse cycles first. Then we can do the
1391 propagation in one pass from the leaves to the roots. */
1392 for (i = 0; i < order_pos; i++ )
1393 {
1394 bool can_throw = false;
1395 node = order[i];
1396
15ca8f90 1397 if (node->symbol.alias)
8c1fce46 1398 continue;
1399
17b28e52 1400 /* Find the worst state for any node in the cycle. */
1401 w = node;
1402 while (w)
1403 {
023a28e1 1404 struct cgraph_edge *e, *ie;
17b28e52 1405 funct_state w_l = get_function_state (w);
1406
ef378c27 1407 if (w_l->can_throw
1408 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
17b28e52 1409 can_throw = true;
1410
1411 if (can_throw)
1412 break;
48e1416a 1413
1414 for (e = w->callees; e; e = e->next_callee)
17b28e52 1415 {
b2c2e188 1416 enum availability avail;
1417 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
17b28e52 1418
b2c2e188 1419 if (avail > AVAIL_OVERWRITABLE)
17b28e52 1420 {
1421 funct_state y_l = get_function_state (y);
1422
48e1416a 1423 if (can_throw)
17b28e52 1424 break;
7d0d0ce1 1425 if (y_l->can_throw && !TREE_NOTHROW (w->symbol.decl)
17b28e52 1426 && e->can_throw_external)
1427 can_throw = true;
1428 }
7d0d0ce1 1429 else if (e->can_throw_external && !TREE_NOTHROW (y->symbol.decl))
ef378c27 1430 can_throw = true;
17b28e52 1431 }
023a28e1 1432 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1433 if (ie->can_throw_external)
73bd65ab 1434 {
1435 can_throw = true;
1436 break;
1437 }
7d0d0ce1 1438 w_info = (struct ipa_dfs_info *) w->symbol.aux;
17b28e52 1439 w = w_info->next_cycle;
1440 }
1441
1442 /* Copy back the region's pure_const_state which is shared by
1443 all nodes in the region. */
1444 w = node;
1445 while (w)
1446 {
ecfab407 1447 funct_state w_l = get_function_state (w);
7d0d0ce1 1448 if (!can_throw && !TREE_NOTHROW (w->symbol.decl))
b5cebd44 1449 {
16fcf0f4 1450 cgraph_set_nothrow_flag (w, true);
b5cebd44 1451 if (dump_file)
48e1416a 1452 fprintf (dump_file, "Function found to be nothrow: %s\n",
b5cebd44 1453 cgraph_node_name (w));
f7d118a9 1454 }
7d0d0ce1 1455 else if (can_throw && !TREE_NOTHROW (w->symbol.decl))
ecfab407 1456 w_l->can_throw = true;
7d0d0ce1 1457 w_info = (struct ipa_dfs_info *) w->symbol.aux;
f7d118a9 1458 w = w_info->next_cycle;
1459 }
1460 }
1461
7771d558 1462 ipa_free_postorder_info ();
f7d118a9 1463 free (order);
c0240443 1464}
1465
1466
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Entry point of the IPA pass execution; returns 0 (no TODO flags).  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;

  /* Summaries are frozen now; drop the cgraph hooks that kept them in
     sync with node insertion/duplication/removal.  */
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more function to not lead to return and improve
     later analysis.  */
  propagate_nothrow ();
  propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  finish_state ();
  return 0;
}
1492
1493static bool
1494gate_pure_const (void)
1495{
86844d6c 1496 return (flag_ipa_pure_const
f7d118a9 1497 /* Don't bother doing anything if the program has errors. */
852f689e 1498 && !seen_error ());
f7d118a9 1499}
1500
namespace {

/* Static descriptor for the IPA pure/const pass; consumed by the pass
   manager through the ipa_opt_pass_d constructor below.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper: wires the summary generation, LTO write/read hooks
   and the propagation step into the pass manager.  No optimization
   summary and no per-function transform are needed.  */
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return propagate (); }

}; // class pass_ipa_pure_const

} // anon namespace

/* Factory used by passes.c to instantiate the IPA pass.  */
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
1547
2c06958d 1548/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 1549
2c06958d 1550static bool
1551skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 1552{
b5cebd44 1553 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1554 we must not promote functions that are called by already processed functions. */
1555
1556 if (function_called_by_processed_nodes_p ())
1557 {
1558 if (dump_file)
1559 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 1560 return true;
b5cebd44 1561 }
16fcf0f4 1562 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
b5cebd44 1563 {
1564 if (dump_file)
0a10fd82 1565 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
2c06958d 1566 return true;
b5cebd44 1567 }
2c06958d 1568 return false;
1569}
1570
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.

   Analyzes only the current function.  Even when SKIP says the result
   may not be committed, the analysis still runs so that the
   -Wsuggest-attribute=const/pure warnings can be issued.  Returns the
   TODO flags of execute_fixup_cfg when any flag changed, else 0.  */

static unsigned int
local_pure_const (void)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_get_node (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  /* With no attribute warnings requested and nothing committable,
     the analysis would be pure overhead.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    {
      /* No edge reaches the exit block: the function never returns.  */
      warn_function_noreturn (cfun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  /* Warn unconditionally; commit the flag only when !SKIP.  */
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const; maybe we can now drop the "looping" variant.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_const_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      cgraph_set_pure_flag (node, true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  /* Nothrow discovery is committed even when SKIP is set.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      cgraph_set_nothrow_flag (node, true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1684
namespace {

/* Static descriptor for the per-function (GIMPLE) pure/const pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* GIMPLE pass wrapper around local_pure_const; clonable because the
   pass appears more than once in the pass queue.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const(gcc::context *ctxt)
    : gimple_opt_pass(pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (ctxt_); }
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return local_pure_const (); }

}; // class pass_local_pure_const

} // anon namespace

/* Factory used by passes.c to instantiate the local pass.  */
gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}