f7d118a9 1/* Callgraph based analysis of static variables.
7cf0dbf3 2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
f7d118a9 4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
f7d118a9 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
f7d118a9 21
f0b5f617 22/* This file marks functions as being either const (TREE_READONLY) or
 23 pure (DECL_PURE_P). It can also set a variant of these that
 24 is allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).
f7d118a9 25
26 This must be run after inlining decisions have been made since
 27 otherwise the local sets will not contain information that is
 28 consistent with the post-inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
30
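/* As an illustration of the three flavors this pass computes (hypothetical
   examples, not from any testcase):

     int add (int a, int b) { return a + b; }         // candidate for const
     extern int g;
     int read_g (void) { return g; }                  // candidate for pure
     int spin (void) { while (g) ; return g; }        // at best looping pure

   ADD touches no memory at all, READ_G only reads global memory, and SPIN
   may additionally never return, so it can only get the looping variant,
   which may still be CSEd but must never be removed by DCE.  */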
31/* The code in this module is called by the ipa pass manager. It
 32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
34
35#include "config.h"
36#include "system.h"
37#include "coretypes.h"
38#include "tm.h"
39#include "tree.h"
40#include "tree-flow.h"
41#include "tree-inline.h"
42#include "tree-pass.h"
43#include "langhooks.h"
44#include "pointer-set.h"
45#include "ggc.h"
46#include "ipa-utils.h"
75a70cf9 47#include "gimple.h"
f7d118a9 48#include "cgraph.h"
f7d118a9 49#include "flags.h"
f7d118a9 50#include "diagnostic.h"
ce084dfc 51#include "gimple-pretty-print.h"
f7d118a9 52#include "langhooks.h"
959fce68 53#include "target.h"
7bfefa9d 54#include "lto-streamer.h"
2541503d 55#include "data-streamer.h"
56#include "tree-streamer.h"
c9263b6a 57#include "cfgloop.h"
58#include "tree-scalar-evolution.h"
2c06958d 59#include "intl.h"
60#include "opts.h"
f7d118a9 61
62static struct pointer_set_t *visited_nodes;
63
64/* Lattice values for const and pure functions. Everything starts out
65 being const, then may drop to pure and then neither depending on
66 what is found. */
67enum pure_const_state_e
68{
69 IPA_CONST,
70 IPA_PURE,
71 IPA_NEITHER
72};
73
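/* The enumerators are ordered from "knows most" (IPA_CONST) to "knows
   least" (IPA_NEITHER), so the numeric comparisons and MIN/MAX used in
   better_state and worse_state below merge states directly; e.g.
   MAX (IPA_CONST, IPA_PURE) == IPA_PURE.  */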
fc94a528 74const char *pure_const_names[3] = {"const", "pure", "neither"};
75
cb886925 76/* Holder for the const_state. There is one of these per function
77 decl. */
48e1416a 78struct funct_state_d
f7d118a9 79{
cb886925 80 /* See above. */
f7d118a9 81 enum pure_const_state_e pure_const_state;
b5cebd44 82 /* What the user set here; we can always be sure about this. */
48e1416a 83 enum pure_const_state_e state_previously_known;
84 bool looping_previously_known;
cb886925 85
 86 /* True if the function could possibly loop forever. There are a
 87 lot of ways that this could be determined. We are pretty
 88 conservative here. While it is possible to CSE pure and const
 89 calls, it is not legal to have DCE get rid of the call if there
 90 is a possibility that the call could loop forever, since this is
 91 a behavioral change. */
9c2a0c05 92 bool looping;
cb886925 93
b5cebd44 94 bool can_throw;
f7d118a9 95};
96
db86e424 97/* State used when we know nothing about a function. */
98static struct funct_state_d varying_state
99 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
100
101
f7d118a9 102typedef struct funct_state_d * funct_state;
103
cb886925 104/* The storage of the funct_state is abstracted because there is the
105 possibility that it may be desirable to move this to the cgraph
48e1416a 106 local info. */
cb886925 107
108/* Array, indexed by cgraph node uid, of function states. */
109
86844d6c 110DEF_VEC_P (funct_state);
111DEF_VEC_ALLOC_P (funct_state, heap);
112static VEC (funct_state, heap) *funct_state_vec;
cb886925 113
50828ed8 114/* Holders of ipa cgraph hooks: */
115static struct cgraph_node_hook_list *function_insertion_hook_holder;
86844d6c 116static struct cgraph_2node_hook_list *node_duplication_hook_holder;
117static struct cgraph_node_hook_list *node_removal_hook_holder;
cb886925 118
2c06958d 119/* Try to guess if the function body will always be visible to the
 120 compiler when compiling the call and whether the compiler will be able
 121 to propagate the information by itself. */
122
123static bool
124function_always_visible_to_compiler_p (tree decl)
125{
126 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
127}
128
129/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
130 is true if the function is known to be finite. The diagnostic is
131 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
 132 OPTION; this function may initialize it, and it is always returned
133 by the function. */
134
135static struct pointer_set_t *
136suggest_attribute (int option, tree decl, bool known_finite,
137 struct pointer_set_t *warned_about,
138 const char * attrib_name)
139{
2c5d2e39 140 if (!option_enabled (option, &global_options))
2c06958d 141 return warned_about;
142 if (TREE_THIS_VOLATILE (decl)
143 || (known_finite && function_always_visible_to_compiler_p (decl)))
144 return warned_about;
145
146 if (!warned_about)
147 warned_about = pointer_set_create ();
148 if (pointer_set_contains (warned_about, decl))
149 return warned_about;
150 pointer_set_insert (warned_about, decl);
151 warning_at (DECL_SOURCE_LOCATION (decl),
152 option,
153 known_finite
154 ? _("function might be candidate for attribute %<%s%>")
155 : _("function might be candidate for attribute %<%s%>"
156 " if it is known to return normally"), attrib_name);
157 return warned_about;
158}
159
 160/* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
161 is true if the function is known to be finite. */
162
163static void
164warn_function_pure (tree decl, bool known_finite)
165{
166 static struct pointer_set_t *warned_about;
167
168 warned_about
169 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
170 known_finite, warned_about, "pure");
171}
172
 173/* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
174 is true if the function is known to be finite. */
175
176static void
177warn_function_const (tree decl, bool known_finite)
178{
179 static struct pointer_set_t *warned_about;
180 warned_about
181 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
182 known_finite, warned_about, "const");
183}
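/* For instance, with -Wsuggest-attribute=const a hypothetical

     int square (int x) { return x * x; }

   that is externally visible (so the compiler cannot rely on seeing the
   body at every call site) would get
   "function might be candidate for attribute 'const'".  */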
43d60d64 184
185void
186warn_function_noreturn (tree decl)
187{
188 static struct pointer_set_t *warned_about;
08c6cbd2 189 if (!lang_hooks.missing_noreturn_ok_p (decl)
190 && targetm.warn_func_return (decl))
43d60d64 191 warned_about
192 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
193 true, warned_about, "noreturn");
194}
9631926a 195
cb886925 196/* Free the function state vector. */
197
198static void
199finish_state (void)
200{
201 free (funct_state_vec);
202}
203
204
1ee39ad1 205/* Return true if we have a function state for NODE. */
206
207static inline bool
208has_function_state (struct cgraph_node *node)
209{
210 if (!funct_state_vec
211 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
212 return false;
213 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
214}
215
48e1416a 216/* Return the function state from NODE. */
f7d118a9 217
218static inline funct_state
219get_function_state (struct cgraph_node *node)
220{
86844d6c 221 if (!funct_state_vec
db86e424 222 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid
223 || !VEC_index (funct_state, funct_state_vec, node->uid))
1ee39ad1 224 /* We might want to put correct previously_known state into varying. */
db86e424 225 return &varying_state;
226 return VEC_index (funct_state, funct_state_vec, node->uid);
cb886925 227}
228
229/* Set the function state S for NODE. */
230
231static inline void
232set_function_state (struct cgraph_node *node, funct_state s)
233{
86844d6c 234 if (!funct_state_vec
235 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
236 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
237 VEC_replace (funct_state, funct_state_vec, node->uid, s);
f7d118a9 238}
239
f0b5f617 240/* Check to see if the use (or definition when CHECKING_WRITE is true)
f7d118a9 241 of variable T is legal in a function that is either pure or const. */
242
48e1416a 243static inline void
244check_decl (funct_state local,
023a28e1 245 tree t, bool checking_write, bool ipa)
f7d118a9 246{
f7d118a9 247 /* Do not want to do anything with volatile except mark any
248 function that uses one to be not const or pure. */
48e1416a 249 if (TREE_THIS_VOLATILE (t))
250 {
f7d118a9 251 local->pure_const_state = IPA_NEITHER;
b5cebd44 252 if (dump_file)
253 fprintf (dump_file, " Volatile operand is not const/pure");
f7d118a9 254 return;
255 }
256
257 /* Do not care about a local automatic that is not static. */
258 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
259 return;
260
b5cebd44 261 /* If the variable has the "used" attribute, treat it as if it had
262 been touched by the devil. */
83a23b05 263 if (DECL_PRESERVE_P (t))
b5cebd44 264 {
265 local->pure_const_state = IPA_NEITHER;
266 if (dump_file)
267 fprintf (dump_file, " Used static/global variable is not const/pure\n");
268 return;
269 }
270
023a28e1 271 /* In IPA mode we are not interested in checking actual loads and stores;
272 they will be processed at propagation time using ipa_ref. */
273 if (ipa)
274 return;
275
f7d118a9 276 /* Since we have dealt with the locals and params cases above, if we
277 are CHECKING_WRITE, this cannot be a pure or constant
278 function. */
48e1416a 279 if (checking_write)
bc17fd99 280 {
281 local->pure_const_state = IPA_NEITHER;
b5cebd44 282 if (dump_file)
283 fprintf (dump_file, " static/global memory write is not const/pure\n");
bc17fd99 284 return;
285 }
f7d118a9 286
287 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
288 {
b5cebd44 289 /* Readonly reads are safe. */
290 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
291 return; /* Read of a constant, do not change the function state. */
48e1416a 292 else
f7d118a9 293 {
b5cebd44 294 if (dump_file)
295 fprintf (dump_file, " global memory read is not const\n");
f7d118a9 296 /* Just a regular read. */
297 if (local->pure_const_state == IPA_CONST)
298 local->pure_const_state = IPA_PURE;
299 }
300 }
b5cebd44 301 else
302 {
303 /* Compilation level statics can be read if they are readonly
304 variables. */
305 if (TREE_READONLY (t))
306 return;
307
308 if (dump_file)
309 fprintf (dump_file, " static memory read is not const\n");
310 /* Just a regular read. */
311 if (local->pure_const_state == IPA_CONST)
312 local->pure_const_state = IPA_PURE;
313 }
f7d118a9 314}
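/* Roughly, in local (non-IPA) mode the checks above classify, e.g.
   (hypothetical snippets):

     static const int tbl[4] = {1, 2, 3, 4};
     extern int counter;

     ... tbl[i] ...;      // readonly static: state is unchanged
     ... counter ...;     // global read: const degrades to pure
     counter = 0;         // global write: state becomes IPA_NEITHER  */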
315
f7d118a9 316
b5cebd44 317/* Check to see if the use (or definition when CHECKING_WRITE is true)
 318 of operand T is legal in a function that is either pure or const. */
f7d118a9 319
48e1416a 320static inline void
5ed0b345 321check_op (funct_state local, tree t, bool checking_write)
f7d118a9 322{
3ae61172 323 t = get_base_address (t);
324 if (t && TREE_THIS_VOLATILE (t))
f7d118a9 325 {
5ed0b345 326 local->pure_const_state = IPA_NEITHER;
327 if (dump_file)
328 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
329 return;
330 }
3ae61172 331 else if (t
182cf5a9 332 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
3ae61172 333 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
334 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
335 {
336 if (dump_file)
337 fprintf (dump_file, " Indirect ref to local memory is OK\n");
338 return;
339 }
5ed0b345 340 else if (checking_write)
341 {
342 local->pure_const_state = IPA_NEITHER;
343 if (dump_file)
344 fprintf (dump_file, " Indirect ref write is not const/pure\n");
345 return;
346 }
347 else
348 {
349 if (dump_file)
350 fprintf (dump_file, " Indirect ref read is not const\n");
351 if (local->pure_const_state == IPA_CONST)
352 local->pure_const_state = IPA_PURE;
f7d118a9 353 }
354}
355
023a28e1 356/* Compute the state based on ECF FLAGS and store to STATE and LOOPING. */
357
358static void
359state_from_flags (enum pure_const_state_e *state, bool *looping,
360 int flags, bool cannot_lead_to_return)
361{
362 *looping = false;
363 if (flags & ECF_LOOPING_CONST_OR_PURE)
364 {
365 *looping = true;
366 if (dump_file && (dump_flags & TDF_DETAILS))
367 fprintf (dump_file, " looping");
368 }
369 if (flags & ECF_CONST)
370 {
371 *state = IPA_CONST;
372 if (dump_file && (dump_flags & TDF_DETAILS))
373 fprintf (dump_file, " const\n");
374 }
375 else if (flags & ECF_PURE)
376 {
377 *state = IPA_PURE;
378 if (dump_file && (dump_flags & TDF_DETAILS))
379 fprintf (dump_file, " pure\n");
380 }
381 else if (cannot_lead_to_return)
382 {
383 *state = IPA_PURE;
384 *looping = true;
385 if (dump_file && (dump_flags & TDF_DETAILS))
386 fprintf (dump_file, " ignoring side effects->pure looping\n");
387 }
388 else
389 {
390 if (dump_file && (dump_flags & TDF_DETAILS))
9631926a 391 fprintf (dump_file, " neither\n");
023a28e1 392 *state = IPA_NEITHER;
393 *looping = true;
394 }
395}
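/* For example, a call carrying ECF_CONST | ECF_LOOPING_CONST_OR_PURE comes
   out as IPA_CONST with *LOOPING set, while a noreturn+nothrow call
   (CANNOT_LEAD_TO_RETURN) is treated as looping pure: its side effects do
   not matter on paths that return, but the call itself must be kept.  */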
396
397/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
 398 into STATE and LOOPING the better of the two variants.
399 Be sure to merge looping correctly. IPA_NEITHER functions
400 have looping 0 even if they don't have to return. */
401
402static inline void
403better_state (enum pure_const_state_e *state, bool *looping,
404 enum pure_const_state_e state2, bool looping2)
405{
406 if (state2 < *state)
407 {
408 if (*state == IPA_NEITHER)
409 *looping = looping2;
410 else
411 *looping = MIN (*looping, looping2);
412 }
413 else if (state2 != IPA_NEITHER)
414 *looping = MIN (*looping, looping2);
415}
416
417/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
 418 into STATE and LOOPING the worse of the two variants. */
419
420static inline void
421worse_state (enum pure_const_state_e *state, bool *looping,
422 enum pure_const_state_e state2, bool looping2)
423{
424 *state = MAX (*state, state2);
425 *looping = MAX (*looping, looping2);
426}
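/* Worked example for worse_state: a caller whose own body merits IPA_CONST
   but which calls a callee that is only IPA_PURE ends up IPA_PURE, and if
   either side may loop the result may loop.  better_state goes the other
   way and is used to fold in what was previously known from declarations.  */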
427
0a10fd82 428/* Recognize special cases of builtins that are by themselves not pure or const
7dd42908 429 but a function using them is. */
430static bool
a53e7471 431special_builtin_state (enum pure_const_state_e *state, bool *looping,
7dd42908 432 tree callee)
433{
434 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
435 switch (DECL_FUNCTION_CODE (callee))
436 {
437 case BUILT_IN_RETURN:
438 case BUILT_IN_UNREACHABLE:
439 case BUILT_IN_ALLOCA:
581bf1c2 440 case BUILT_IN_ALLOCA_WITH_ALIGN:
7dd42908 441 case BUILT_IN_STACK_SAVE:
442 case BUILT_IN_STACK_RESTORE:
443 case BUILT_IN_EH_POINTER:
444 case BUILT_IN_EH_FILTER:
445 case BUILT_IN_UNWIND_RESUME:
446 case BUILT_IN_CXA_END_CLEANUP:
447 case BUILT_IN_EH_COPY_VALUES:
448 case BUILT_IN_FRAME_ADDRESS:
449 case BUILT_IN_APPLY:
450 case BUILT_IN_APPLY_ARGS:
7dd42908 451 *looping = false;
452 *state = IPA_CONST;
453 return true;
454 case BUILT_IN_PREFETCH:
455 *looping = true;
456 *state = IPA_CONST;
457 return true;
458 }
459 return false;
460}
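/* E.g. __builtin_alloca and __builtin_stack_save/restore are not const in
   the attribute sense, but their effects are confined to the calling
   function's own frame, so a function using them can still be const.
   __builtin_prefetch is additionally marked looping, presumably so that
   DCE never removes the hint.  */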
461
f7d118a9 462/* Check the parameters of a function call to CALL_EXPR to see if
463 there are any references in the parameters that are not allowed for
464 pure or const functions. Also check to see if this is either an
465 indirect call, a call outside the compilation unit, or has special
 466 attributes that may also affect the purity. CALL is the call
 467 statement being checked. */
468
469static void
b5cebd44 470check_call (funct_state local, gimple call, bool ipa)
f7d118a9 471{
75a70cf9 472 int flags = gimple_call_flags (call);
b5cebd44 473 tree callee_t = gimple_call_fndecl (call);
b5cebd44 474 bool possibly_throws = stmt_could_throw_p (call);
475 bool possibly_throws_externally = (possibly_throws
476 && stmt_can_throw_external (call));
f7d118a9 477
b5cebd44 478 if (possibly_throws)
479 {
480 unsigned int i;
481 for (i = 0; i < gimple_num_ops (call); i++)
482 if (gimple_op (call, i)
483 && tree_could_throw_p (gimple_op (call, i)))
484 {
cbeb677e 485 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 486 {
487 if (dump_file)
488 fprintf (dump_file, " operand can throw; looping\n");
489 local->looping = true;
490 }
491 if (possibly_throws_externally)
492 {
493 if (dump_file)
494 fprintf (dump_file, " operand can throw externally\n");
495 local->can_throw = true;
496 }
497 }
498 }
48e1416a 499
f7d118a9 500 /* The const and pure flags are set by a variety of places in the
501 compiler (including here). If someone has already set the flags
502 for the callee, (such as for some of the builtins) we will use
48e1416a 503 them, otherwise we will compute our own information.
504
f7d118a9 505 Const and pure functions have less clobber effects than other
506 functions so we process these first. Otherwise if it is a call
507 outside the compilation unit or an indirect call we punt. This
508 leaves local calls which will be processed by following the call
48e1416a 509 graph. */
f7d118a9 510 if (callee_t)
511 {
7dd42908 512 enum pure_const_state_e call_state;
513 bool call_looping;
514
a53e7471 515 if (special_builtin_state (&call_state, &call_looping, callee_t))
7dd42908 516 {
517 worse_state (&local->pure_const_state, &local->looping,
518 call_state, call_looping);
519 return;
520 }
f7d118a9 521 /* When bad things happen to bad functions, they cannot be const
522 or pure. */
523 if (setjmp_call_p (callee_t))
9c2a0c05 524 {
b5cebd44 525 if (dump_file)
526 fprintf (dump_file, " setjmp is not const/pure\n");
527 local->looping = true;
9c2a0c05 528 local->pure_const_state = IPA_NEITHER;
9c2a0c05 529 }
f7d118a9 530
531 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
532 switch (DECL_FUNCTION_CODE (callee_t))
533 {
534 case BUILT_IN_LONGJMP:
535 case BUILT_IN_NONLOCAL_GOTO:
b5cebd44 536 if (dump_file)
537 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
f7d118a9 538 local->pure_const_state = IPA_NEITHER;
b5cebd44 539 local->looping = true;
f7d118a9 540 break;
541 default:
542 break;
543 }
544 }
545
b5cebd44 546 /* When not in IPA mode, we can still handle self recursion. */
547 if (!ipa && callee_t == current_function_decl)
2c06958d 548 {
549 if (dump_file)
550 fprintf (dump_file, " Recursive call can loop.\n");
551 local->looping = true;
552 }
0a10fd82 553 /* Either the callee is unknown or we are doing local analysis.
ef378c27 554 Look to see if there are any bits available for the callee (such as by
 555 declaration or because it is a builtin) and process solely on the basis of
556 those bits. */
023a28e1 557 else if (!ipa)
f7d118a9 558 {
023a28e1 559 enum pure_const_state_e call_state;
560 bool call_looping;
cbeb677e 561 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 562 {
563 if (dump_file)
564 fprintf (dump_file, " can throw; looping\n");
565 local->looping = true;
566 }
567 if (possibly_throws_externally)
568 {
569 if (dump_file)
570 {
e38def9c 571 fprintf (dump_file, " can throw externally to lp %i\n",
572 lookup_stmt_eh_lp (call));
b5cebd44 573 if (callee_t)
574 fprintf (dump_file, " callee:%s\n",
575 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
576 }
577 local->can_throw = true;
578 }
023a28e1 579 if (dump_file && (dump_flags & TDF_DETAILS))
580 fprintf (dump_file, " checking flags for call:");
581 state_from_flags (&call_state, &call_looping, flags,
582 ((flags & (ECF_NORETURN | ECF_NOTHROW))
583 == (ECF_NORETURN | ECF_NOTHROW))
584 || (!flag_exceptions && (flags & ECF_NORETURN)));
585 worse_state (&local->pure_const_state, &local->looping,
586 call_state, call_looping);
f7d118a9 587 }
b5cebd44 588 /* Direct functions calls are handled by IPA propagation. */
f7d118a9 589}
590
023a28e1 591/* Wrapper around check_decl for loads in local mode. */
5ed0b345 592
593static bool
594check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
595{
596 if (DECL_P (op))
023a28e1 597 check_decl ((funct_state)data, op, false, false);
5ed0b345 598 else
599 check_op ((funct_state)data, op, false);
600 return false;
601}
602
023a28e1 603/* Wrapper around check_decl for stores in local mode. */
5ed0b345 604
605static bool
606check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
607{
608 if (DECL_P (op))
023a28e1 609 check_decl ((funct_state)data, op, true, false);
610 else
611 check_op ((funct_state)data, op, true);
612 return false;
613}
614
615/* Wrapper around check_decl for loads in ipa mode. */
616
617static bool
618check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
619{
620 if (DECL_P (op))
621 check_decl ((funct_state)data, op, false, true);
622 else
623 check_op ((funct_state)data, op, false);
624 return false;
625}
626
627/* Wrapper around check_decl for stores in ipa mode. */
628
629static bool
630check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
631{
632 if (DECL_P (op))
633 check_decl ((funct_state)data, op, true, true);
5ed0b345 634 else
635 check_op ((funct_state)data, op, true);
636 return false;
637}
638
dd277d48 639/* Look at the statement pointed to by GSIP and figure out what interesting side
640 effects it has. */
b5cebd44 641static void
642check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
f7d118a9 643{
b5cebd44 644 gimple stmt = gsi_stmt (*gsip);
f7d118a9 645
9845d120 646 if (is_gimple_debug (stmt))
647 return;
648
b5cebd44 649 if (dump_file)
f7d118a9 650 {
b5cebd44 651 fprintf (dump_file, " scanning: ");
652 print_gimple_stmt (dump_file, stmt, 0, 0);
653 }
dd277d48 654
541c6dbb 655 if (gimple_has_volatile_ops (stmt)
656 && !gimple_clobber_p (stmt))
4b68df2e 657 {
658 local->pure_const_state = IPA_NEITHER;
659 if (dump_file)
660 fprintf (dump_file, " Volatile stmt is not const/pure\n");
661 }
662
5ed0b345 663 /* Look for loads and stores. */
023a28e1 664 walk_stmt_load_store_ops (stmt, local,
665 ipa ? check_ipa_load : check_load,
666 ipa ? check_ipa_store : check_store);
b5cebd44 667
668 if (gimple_code (stmt) != GIMPLE_CALL
669 && stmt_could_throw_p (stmt))
670 {
cbeb677e 671 if (cfun->can_throw_non_call_exceptions)
b5cebd44 672 {
673 if (dump_file)
674 fprintf (dump_file, " can throw; looping");
675 local->looping = true;
676 }
677 if (stmt_can_throw_external (stmt))
678 {
679 if (dump_file)
680 fprintf (dump_file, " can throw externally");
681 local->can_throw = true;
682 }
75a70cf9 683 }
75a70cf9 684 switch (gimple_code (stmt))
685 {
b5cebd44 686 case GIMPLE_CALL:
b5cebd44 687 check_call (local, stmt, ipa);
f7d118a9 688 break;
75a70cf9 689 case GIMPLE_LABEL:
690 if (DECL_NONLOCAL (gimple_label_label (stmt)))
f7d118a9 691 /* Target of long jump. */
9c2a0c05 692 {
b5cebd44 693 if (dump_file)
694 fprintf (dump_file, " nonlocal label is not const/pure");
9c2a0c05 695 local->pure_const_state = IPA_NEITHER;
9c2a0c05 696 }
f7d118a9 697 break;
75a70cf9 698 case GIMPLE_ASM:
97cf41ec 699 if (gimple_asm_clobbers_memory_p (stmt))
b5cebd44 700 {
97cf41ec 701 if (dump_file)
702 fprintf (dump_file, " memory asm clobber is not const/pure");
703 /* Abandon all hope, ye who enter here. */
704 local->pure_const_state = IPA_NEITHER;
b5cebd44 705 }
706 if (gimple_asm_volatile_p (stmt))
707 {
708 if (dump_file)
709 fprintf (dump_file, " volatile is not const/pure");
710 /* Abandon all hope, ye who enter here. */
711 local->pure_const_state = IPA_NEITHER;
712 local->looping = true;
713 }
714 return;
f7d118a9 715 default:
716 break;
717 }
f7d118a9 718}
719
cb886925 720
f7d118a9 721/* This is the main routine for finding the reference patterns for
722 global variables within a function FN. */
723
b5cebd44 724static funct_state
725analyze_function (struct cgraph_node *fn, bool ipa)
f7d118a9 726{
7d0d0ce1 727 tree decl = fn->symbol.decl;
b5cebd44 728 funct_state l;
729 basic_block this_block;
f7d118a9 730
b5cebd44 731 l = XCNEW (struct funct_state_d);
732 l->pure_const_state = IPA_CONST;
df9b545b 733 l->state_previously_known = IPA_NEITHER;
734 l->looping_previously_known = true;
b5cebd44 735 l->looping = false;
736 l->can_throw = false;
91bf9d9a 737 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
7d0d0ce1 738 flags_from_decl_or_type (fn->symbol.decl),
91bf9d9a 739 cgraph_node_cannot_return (fn));
740
b2c2e188 741 if (fn->thunk.thunk_p || fn->alias)
91bf9d9a 742 {
743 /* Thunk gets propagated through, so nothing interesting happens. */
744 gcc_assert (ipa);
745 return l;
746 }
f7d118a9 747
748 if (dump_file)
749 {
48e1416a 750 fprintf (dump_file, "\n\n local analysis of %s\n ",
b5cebd44 751 cgraph_node_name (fn));
f7d118a9 752 }
48e1416a 753
b5cebd44 754 push_cfun (DECL_STRUCT_FUNCTION (decl));
48e1416a 755
b5cebd44 756 FOR_EACH_BB (this_block)
f7d118a9 757 {
b5cebd44 758 gimple_stmt_iterator gsi;
759 struct walk_stmt_info wi;
f7d118a9 760
b5cebd44 761 memset (&wi, 0, sizeof(wi));
762 for (gsi = gsi_start_bb (this_block);
763 !gsi_end_p (gsi);
764 gsi_next (&gsi))
f7d118a9 765 {
b5cebd44 766 check_stmt (&gsi, l, ipa);
767 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
768 goto end;
f7d118a9 769 }
770 }
dcccac3e 771
772end:
b5cebd44 773 if (l->pure_const_state != IPA_NEITHER)
774 {
775 /* Const functions cannot have back edges (an
776 indication of possible infinite loop side
 777 effect). */
778 if (mark_dfs_back_edges ())
c9263b6a 779 {
b1887855 780 /* Preheaders are needed for SCEV to work.
0a10fd82 781 Simple latches and recorded exits improve chances that a loop will
b1887855 782 be proved finite in testcases such as loop-15.c and loop-24.c. */
783 loop_optimizer_init (LOOPS_NORMAL
784 | LOOPS_HAVE_RECORDED_EXITS);
c9263b6a 785 if (dump_file && (dump_flags & TDF_DETAILS))
786 flow_loops_dump (dump_file, NULL, 0);
787 if (mark_irreducible_loops ())
788 {
789 if (dump_file)
790 fprintf (dump_file, " has irreducible loops\n");
791 l->looping = true;
792 }
48e1416a 793 else
c9263b6a 794 {
795 loop_iterator li;
796 struct loop *loop;
797 scev_initialize ();
798 FOR_EACH_LOOP (li, loop, 0)
799 if (!finite_loop_p (loop))
800 {
801 if (dump_file)
 802 fprintf (dump_file, " cannot prove finiteness of loop %i\n", loop->num);
 803 l->looping = true;
83b709f2 804 FOR_EACH_LOOP_BREAK (li);
c9263b6a 805 }
806 scev_finalize ();
807 }
808 loop_optimizer_finalize ();
809 }
b5cebd44 810 }
811
023a28e1 812 if (dump_file && (dump_flags & TDF_DETAILS))
813 fprintf (dump_file, " checking previously known:");
023a28e1 814
815 better_state (&l->pure_const_state, &l->looping,
816 l->state_previously_known,
817 l->looping_previously_known);
b5cebd44 818 if (TREE_NOTHROW (decl))
819 l->can_throw = false;
820
821 pop_cfun ();
dcccac3e 822 if (dump_file)
823 {
b5cebd44 824 if (l->looping)
825 fprintf (dump_file, "Function is locally looping.\n");
826 if (l->can_throw)
827 fprintf (dump_file, "Function is locally throwing.\n");
828 if (l->pure_const_state == IPA_CONST)
829 fprintf (dump_file, "Function is locally const.\n");
830 if (l->pure_const_state == IPA_PURE)
831 fprintf (dump_file, "Function is locally pure.\n");
dcccac3e 832 }
b5cebd44 833 return l;
f7d118a9 834}
835
50828ed8 836/* Called when a new function is inserted into the callgraph late. */
837static void
838add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
839{
ef378c27 840 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
86844d6c 841 return;
50828ed8 842 /* There are some shared nodes, in particular the initializers on
843 static declarations. We do not need to scan them more than once
844 since all we would be interested in are the addressof
845 operations. */
846 visited_nodes = pointer_set_create ();
2c06958d 847 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
848 set_function_state (node, analyze_function (node, true));
50828ed8 849 pointer_set_destroy (visited_nodes);
850 visited_nodes = NULL;
851}
852
86844d6c 853/* Called when a new clone is inserted into the callgraph late. */
854
855static void
856duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
857 void *data ATTRIBUTE_UNUSED)
858{
1ee39ad1 859 if (has_function_state (src))
86844d6c 860 {
861 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 862 gcc_assert (!has_function_state (dst));
86844d6c 863 memcpy (l, get_function_state (src), sizeof (*l));
864 set_function_state (dst, l);
865 }
866}
867
 868/* Called when a cgraph node is removed late. */
869
870static void
871remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
872{
1ee39ad1 873 if (has_function_state (node))
86844d6c 874 {
db86e424 875 funct_state l = get_function_state (node);
876 if (l != &varying_state)
877 free (l);
86844d6c 878 set_function_state (node, NULL);
879 }
880}
881
f7d118a9 882\f
7bfefa9d 883static void
884register_hooks (void)
f7d118a9 885{
7bfefa9d 886 static bool init_p = false;
887
888 if (init_p)
889 return;
890
891 init_p = true;
f7d118a9 892
86844d6c 893 node_removal_hook_holder =
894 cgraph_add_node_removal_hook (&remove_node_data, NULL);
895 node_duplication_hook_holder =
896 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
50828ed8 897 function_insertion_hook_holder =
898 cgraph_add_function_insertion_hook (&add_new_function, NULL);
7bfefa9d 899}
900
901
902/* Analyze each function in the cgraph to see if it is locally PURE or
903 CONST. */
904
48e1416a 905static void
7bfefa9d 906generate_summary (void)
907{
908 struct cgraph_node *node;
909
910 register_hooks ();
911
f7d118a9 912 /* There are some shared nodes, in particular the initializers on
913 static declarations. We do not need to scan them more than once
914 since all we would be interested in are the addressof
915 operations. */
916 visited_nodes = pointer_set_create ();
917
48e1416a 918 /* Process all of the functions.
f7d118a9 919
ef378c27 920 We process AVAIL_OVERWRITABLE functions. We cannot use the results
 921 by default, but the info can be used at LTO with -fwhole-program or
 922 when the function got cloned and the clone is AVAILABLE. */
ef378c27 923
7c455d87 924 FOR_EACH_DEFINED_FUNCTION (node)
ef378c27 925 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
b5cebd44 926 set_function_state (node, analyze_function (node, true));
f7d118a9 927
928 pointer_set_destroy (visited_nodes);
929 visited_nodes = NULL;
cb886925 930}
931
7bfefa9d 932
933/* Serialize the ipa info for lto. */
934
935static void
eab36a5a 936pure_const_write_summary (void)
7bfefa9d 937{
938 struct cgraph_node *node;
939 struct lto_simple_output_block *ob
940 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
941 unsigned int count = 0;
eab36a5a 942 lto_symtab_encoder_iterator lsei;
943 lto_symtab_encoder_t encoder;
7bfefa9d 944
eab36a5a 945 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
946
947 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
948 lsei_next_function_in_partition (&lsei))
7bfefa9d 949 {
eab36a5a 950 node = lsei_cgraph_node (lsei);
1ee39ad1 951 if (node->analyzed && has_function_state (node))
7bfefa9d 952 count++;
953 }
48e1416a 954
7f385784 955 streamer_write_uhwi_stream (ob->main_stream, count);
48e1416a 956
7bfefa9d 957 /* Process all of the functions. */
eab36a5a 958 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
959 lsei_next_function_in_partition (&lsei))
7bfefa9d 960 {
eab36a5a 961 node = lsei_cgraph_node (lsei);
1ee39ad1 962 if (node->analyzed && has_function_state (node))
7bfefa9d 963 {
30baba90 964 struct bitpack_d bp;
7bfefa9d 965 funct_state fs;
966 int node_ref;
70225339 967 lto_symtab_encoder_t encoder;
48e1416a 968
7bfefa9d 969 fs = get_function_state (node);
970
70225339 971 encoder = ob->decl_state->symtab_node_encoder;
972 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node)node);
7f385784 973 streamer_write_uhwi_stream (ob->main_stream, node_ref);
48e1416a 974
7bfefa9d 975 /* Note that flags will need to be read in the opposite
976 order as we are pushing the bitflags into FLAGS. */
30baba90 977 bp = bitpack_create (ob->main_stream);
978 bp_pack_value (&bp, fs->pure_const_state, 2);
979 bp_pack_value (&bp, fs->state_previously_known, 2);
980 bp_pack_value (&bp, fs->looping_previously_known, 1);
981 bp_pack_value (&bp, fs->looping, 1);
982 bp_pack_value (&bp, fs->can_throw, 1);
7f385784 983 streamer_write_bitpack (&bp);
7bfefa9d 984 }
985 }
986
987 lto_destroy_simple_output_block (ob);
988}
989
990
991/* Deserialize the ipa info for lto. */
992
48e1416a 993static void
7bfefa9d 994pure_const_read_summary (void)
995{
996 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
997 struct lto_file_decl_data *file_data;
998 unsigned int j = 0;
999
1000 register_hooks ();
1001 while ((file_data = file_data_vec[j++]))
1002 {
1003 const char *data;
1004 size_t len;
1005 struct lto_input_block *ib
48e1416a 1006 = lto_create_simple_input_block (file_data,
1007 LTO_section_ipa_pure_const,
7bfefa9d 1008 &data, &len);
1009 if (ib)
1010 {
1011 unsigned int i;
7f385784 1012 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1013
1014 for (i = 0; i < count; i++)
1015 {
1016 unsigned int index;
1017 struct cgraph_node *node;
30baba90 1018 struct bitpack_d bp;
7bfefa9d 1019 funct_state fs;
70225339 1020 lto_symtab_encoder_t encoder;
7bfefa9d 1021
1022 fs = XCNEW (struct funct_state_d);
7f385784 1023 index = streamer_read_uhwi (ib);
70225339 1024 encoder = file_data->symtab_node_encoder;
1025 node = cgraph (lto_symtab_encoder_deref (encoder, index));
7bfefa9d 1026 set_function_state (node, fs);
1027
1028 /* Note that the flags must be read in the opposite
1029 order in which they were written (the bitflags were
1030 pushed into FLAGS). */
7f385784 1031 bp = streamer_read_bitpack (ib);
7bfefa9d 1032 fs->pure_const_state
30baba90 1033 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1034 fs->state_previously_known
30baba90 1035 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1036 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1037 fs->looping = bp_unpack_value (&bp, 1);
1038 fs->can_throw = bp_unpack_value (&bp, 1);
fc94a528 1039 if (dump_file)
1040 {
7d0d0ce1 1041 int flags = flags_from_decl_or_type (node->symbol.decl);
fc94a528 1042 fprintf (dump_file, "Read info for %s/%i ",
1043 cgraph_node_name (node),
1044 node->uid);
1045 if (flags & ECF_CONST)
1046 fprintf (dump_file, " const");
1047 if (flags & ECF_PURE)
1048 fprintf (dump_file, " pure");
1049 if (flags & ECF_NOTHROW)
1050 fprintf (dump_file, " nothrow");
1051 fprintf (dump_file, "\n pure const state: %s\n",
1052 pure_const_names[fs->pure_const_state]);
1053 fprintf (dump_file, " previously known state: %s\n",
 1054 pure_const_names[fs->state_previously_known]);
1055 if (fs->looping)
1056 fprintf (dump_file," function is locally looping\n");
1057 if (fs->looping_previously_known)
1058 fprintf (dump_file," function is previously known looping\n");
1059 if (fs->can_throw)
1060 fprintf (dump_file," function is locally throwing\n");
1061 }
7bfefa9d 1062 }
1063
48e1416a 1064 lto_destroy_simple_input_block (file_data,
1065 LTO_section_ipa_pure_const,
7bfefa9d 1066 ib, data, len);
1067 }
1068 }
1069}
1070
1071
17b28e52 1072static bool
1073ignore_edge (struct cgraph_edge *e)
1074{
1075 return (!e->can_throw_external);
1076}
1077
b2c2e188 1078/* Return true if NODE is a self recursive function.
 1079 ??? Self recursive and indirectly recursive functions should
 1080 be the same, so this function seems unnecessary. */
a1e88032 1081
1082static bool
1083self_recursive_p (struct cgraph_node *node)
1084{
1085 struct cgraph_edge *e;
1086 for (e = node->callees; e; e = e->next_callee)
b2c2e188 1087 if (cgraph_function_node (e->callee, NULL) == node)
a1e88032 1088 return true;
1089 return false;
1090}
1091
c0240443 1092/* Produce transitive closure over the callgraph and compute pure/const
1093 attributes. */
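/* For instance, with mutually recursive functions (a hypothetical
   illustration)

     int odd (int);
     int even (int n) { return n == 0 ? 1 : odd (n - 1); }
     int odd (int n) { return n == 0 ? 0 : even (n - 1); }

   both functions land in one strongly connected component, share the worst
   of their local states and are marked looping, since recursive cycles are
   treated as possibly infinite.  */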
023a28e1 1094
c0240443 1095static void
1096propagate_pure_const (void)
cb886925 1097{
1098 struct cgraph_node *node;
1099 struct cgraph_node *w;
1100 struct cgraph_node **order =
1101 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1102 int order_pos;
1103 int i;
1104 struct ipa_dfs_info * w_info;
1105
7771d558 1106 order_pos = ipa_reduced_postorder (order, true, false, NULL);
f7d118a9 1107 if (dump_file)
1108 {
1109 dump_cgraph (dump_file);
7771d558 1110 ipa_print_order(dump_file, "reduced", order, order_pos);
f7d118a9 1111 }
1112
9d75589a 1113 /* Propagate the local information through the call graph to produce
f7d118a9 1114 the global information. All the nodes within a cycle will have
1115 the same info so we collapse cycles first. Then we can do the
1116 propagation in one pass from the leaves to the roots. */
1117 for (i = 0; i < order_pos; i++ )
1118 {
1119 enum pure_const_state_e pure_const_state = IPA_CONST;
9c2a0c05 1120 bool looping = false;
54157bf1 1121 int count = 0;
f7d118a9 1122 node = order[i];
1123
8c1fce46 1124 if (node->alias)
1125 continue;
1126
fc94a528 1127 if (dump_file && (dump_flags & TDF_DETAILS))
1128 fprintf (dump_file, "Starting cycle\n");
1129
f7d118a9 1130 /* Find the worst state for any node in the cycle. */
1131 w = node;
023a28e1 1132 while (w && pure_const_state != IPA_NEITHER)
f7d118a9 1133 {
b5cebd44 1134 struct cgraph_edge *e;
023a28e1 1135 struct cgraph_edge *ie;
1136 int i;
1137 struct ipa_ref *ref;
1138
f7d118a9 1139 funct_state w_l = get_function_state (w);
fc94a528 1140 if (dump_file && (dump_flags & TDF_DETAILS))
1141 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1142 cgraph_node_name (w),
1143 w->uid,
1144 pure_const_names[w_l->pure_const_state],
1145 w_l->looping);
f7d118a9 1146
023a28e1 1147 /* First merge in function body properties. */
1148 worse_state (&pure_const_state, &looping,
1149 w_l->pure_const_state, w_l->looping);
1150 if (pure_const_state == IPA_NEITHER)
1151 break;
1152
 1153 /* For overwritable nodes we cannot assume anything. */
ef378c27 1154 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1155 {
023a28e1 1156 worse_state (&pure_const_state, &looping,
1157 w_l->state_previously_known,
1158 w_l->looping_previously_known);
fc94a528 1159 if (dump_file && (dump_flags & TDF_DETAILS))
1160 {
1161 fprintf (dump_file,
1162 " Overwritable. state %s looping %i\n",
1163 pure_const_names[w_l->state_previously_known],
1164 w_l->looping_previously_known);
1165 }
023a28e1 1166 break;
ef378c27 1167 }
9c2a0c05 1168
b5cebd44 1169 count++;
1170
023a28e1 1171 /* We consider recursive cycles as possibly infinite.
1172 This might be relaxed since infinite recursion leads to stack
1173 overflow. */
b5cebd44 1174 if (count > 1)
1175 looping = true;
48e1416a 1176
023a28e1 1177 /* Now walk the edges and merge in callee properties. */
48e1416a 1178 for (e = w->callees; e; e = e->next_callee)
f7d118a9 1179 {
b2c2e188 1180 enum availability avail;
1181 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
fc94a528 1182 enum pure_const_state_e edge_state = IPA_CONST;
1183 bool edge_looping = false;
54157bf1 1184
fc94a528 1185 if (dump_file && (dump_flags & TDF_DETAILS))
1186 {
1187 fprintf (dump_file,
1188 " Call to %s/%i",
1189 cgraph_node_name (e->callee),
1190 e->callee->uid);
1191 }
b2c2e188 1192 if (avail > AVAIL_OVERWRITABLE)
f7d118a9 1193 {
b5cebd44 1194 funct_state y_l = get_function_state (y);
fc94a528 1195 if (dump_file && (dump_flags & TDF_DETAILS))
1196 {
1197 fprintf (dump_file,
1198 " state:%s looping:%i\n",
1199 pure_const_names[y_l->pure_const_state],
1200 y_l->looping);
1201 }
4c170941 1202 if (y_l->pure_const_state > IPA_PURE
1ee39ad1 1203 && cgraph_edge_cannot_lead_to_return (e))
fc94a528 1204 {
1205 if (dump_file && (dump_flags & TDF_DETAILS))
023a28e1 1206 fprintf (dump_file,
1207 " Ignoring side effects"
1208 " -> pure, looping\n");
fc94a528 1209 edge_state = IPA_PURE;
1210 edge_looping = true;
1211 }
1212 else
1213 {
1214 edge_state = y_l->pure_const_state;
1215 edge_looping = y_l->looping;
1216 }
f7d118a9 1217 }
a53e7471 1218 else if (special_builtin_state (&edge_state, &edge_looping,
7d0d0ce1 1219 y->symbol.decl))
7dd42908 1220 ;
ef378c27 1221 else
023a28e1 1222 state_from_flags (&edge_state, &edge_looping,
7d0d0ce1 1223 flags_from_decl_or_type (y->symbol.decl),
7dd42908 1224 cgraph_edge_cannot_lead_to_return (e));
023a28e1 1225
1226 /* Merge the results with what we already know. */
1227 better_state (&edge_state, &edge_looping,
1228 w_l->state_previously_known,
1229 w_l->looping_previously_known);
1230 worse_state (&pure_const_state, &looping,
1231 edge_state, edge_looping);
1232 if (pure_const_state == IPA_NEITHER)
1233 break;
1234 }
1235 if (pure_const_state == IPA_NEITHER)
1236 break;
ef378c27 1237
023a28e1 1238 /* Now process the indirect call. */
451e322a 1239 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
023a28e1 1240 {
1241 enum pure_const_state_e edge_state = IPA_CONST;
1242 bool edge_looping = false;
4c170941 1243
023a28e1 1244 if (dump_file && (dump_flags & TDF_DETAILS))
1245 fprintf (dump_file, " Indirect call");
1246 state_from_flags (&edge_state, &edge_looping,
1247 ie->indirect_info->ecf_flags,
1248 cgraph_edge_cannot_lead_to_return (ie));
1249 /* Merge the results with what we already know. */
1250 better_state (&edge_state, &edge_looping,
1251 w_l->state_previously_known,
1252 w_l->looping_previously_known);
1253 worse_state (&pure_const_state, &looping,
1254 edge_state, edge_looping);
fc94a528 1255 if (pure_const_state == IPA_NEITHER)
1256 break;
f7d118a9 1257 }
023a28e1 1258 if (pure_const_state == IPA_NEITHER)
1259 break;
1260
1261 /* And finally all loads and stores. */
7d0d0ce1 1262 for (i = 0; ipa_ref_list_reference_iterate (&w->symbol.ref_list, i, ref); i++)
023a28e1 1263 {
1264 enum pure_const_state_e ref_state = IPA_CONST;
1265 bool ref_looping = false;
1266 switch (ref->use)
1267 {
1268 case IPA_REF_LOAD:
1269 /* readonly reads are safe. */
7d0d0ce1 1270 if (TREE_READONLY (ipa_ref_varpool_node (ref)->symbol.decl))
023a28e1 1271 break;
1272 if (dump_file && (dump_flags & TDF_DETAILS))
1273 fprintf (dump_file, " nonreadonly global var read\n");
1274 ref_state = IPA_PURE;
1275 break;
1276 case IPA_REF_STORE:
1277 if (ipa_ref_cannot_lead_to_return (ref))
1278 break;
1279 ref_state = IPA_NEITHER;
1280 if (dump_file && (dump_flags & TDF_DETAILS))
1281 fprintf (dump_file, " global var write\n");
1282 break;
1283 case IPA_REF_ADDR:
1284 break;
1285 }
1286 better_state (&ref_state, &ref_looping,
1287 w_l->state_previously_known,
1288 w_l->looping_previously_known);
1289 worse_state (&pure_const_state, &looping,
1290 ref_state, ref_looping);
1291 if (pure_const_state == IPA_NEITHER)
1292 break;
1293 }
7d0d0ce1 1294 w_info = (struct ipa_dfs_info *) w->symbol.aux;
f7d118a9 1295 w = w_info->next_cycle;
1296 }
fc94a528 1297 if (dump_file && (dump_flags & TDF_DETAILS))
1298 fprintf (dump_file, "Result %s looping %i\n",
1299 pure_const_names [pure_const_state],
1300 looping);
f7d118a9 1301
1302 /* Copy back the region's pure_const_state which is shared by
1303 all nodes in the region. */
1304 w = node;
1305 while (w)
1306 {
1307 funct_state w_l = get_function_state (w);
b5cebd44 1308 enum pure_const_state_e this_state = pure_const_state;
1309 bool this_looping = looping;
f7d118a9 1310
df9b545b 1311 if (w_l->state_previously_known != IPA_NEITHER
1312 && this_state > w_l->state_previously_known)
4c170941 1313 {
1314 this_state = w_l->state_previously_known;
1315 this_looping |= w_l->looping_previously_known;
1316 }
a1e88032 1317 if (!this_looping && self_recursive_p (w))
1318 this_looping = true;
df9b545b 1319 if (!w_l->looping_previously_known)
1320 this_looping = false;
cb886925 1321
b5cebd44 1322 /* All nodes within a cycle share the same info. */
1323 w_l->pure_const_state = this_state;
1324 w_l->looping = this_looping;
1325
1326 switch (this_state)
1327 {
1328 case IPA_CONST:
7d0d0ce1 1329 if (!TREE_READONLY (w->symbol.decl))
2c06958d 1330 {
7d0d0ce1 1331 warn_function_const (w->symbol.decl, !this_looping);
2c06958d 1332 if (dump_file)
1333 fprintf (dump_file, "Function found to be %sconst: %s\n",
1334 this_looping ? "looping " : "",
1335 cgraph_node_name (w));
1336 }
7f74ac6b 1337 cgraph_set_const_flag (w, true, this_looping);
b5cebd44 1338 break;
48e1416a 1339
b5cebd44 1340 case IPA_PURE:
7d0d0ce1 1341 if (!DECL_PURE_P (w->symbol.decl))
2c06958d 1342 {
7d0d0ce1 1343 warn_function_pure (w->symbol.decl, !this_looping);
2c06958d 1344 if (dump_file)
1345 fprintf (dump_file, "Function found to be %spure: %s\n",
1346 this_looping ? "looping " : "",
1347 cgraph_node_name (w));
1348 }
7f74ac6b 1349 cgraph_set_pure_flag (w, true, this_looping);
b5cebd44 1350 break;
48e1416a 1351
b5cebd44 1352 default:
1353 break;
1354 }
7d0d0ce1 1355 w_info = (struct ipa_dfs_info *) w->symbol.aux;
17b28e52 1356 w = w_info->next_cycle;
1357 }
1358 }
1359
7771d558 1360 ipa_free_postorder_info ();
c0240443 1361 free (order);
1362}
1363
1364/* Produce transitive closure over the callgraph and compute nothrow
1365 attributes. */
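/* E.g. a function whose body contains no statement that can throw
   externally and whose callees are all already TREE_NOTHROW (or whose
   calls cannot throw externally) becomes TREE_NOTHROW itself, which in
   turn lets its callers drop EH handling around calls to it.  */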
1366
1367static void
1368propagate_nothrow (void)
1369{
1370 struct cgraph_node *node;
1371 struct cgraph_node *w;
1372 struct cgraph_node **order =
1373 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1374 int order_pos;
1375 int i;
1376 struct ipa_dfs_info * w_info;
1377
7771d558 1378 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
17b28e52 1379 if (dump_file)
1380 {
1381 dump_cgraph (dump_file);
7771d558 1382 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1383 }
c0240443 1384
9d75589a 1385 /* Propagate the local information through the call graph to produce
17b28e52 1386 the global information. All the nodes within a cycle will have
1387 the same info so we collapse cycles first. Then we can do the
1388 propagation in one pass from the leaves to the roots. */
1389 for (i = 0; i < order_pos; i++ )
1390 {
1391 bool can_throw = false;
1392 node = order[i];
1393
8c1fce46 1394 if (node->alias)
1395 continue;
1396
17b28e52 1397 /* Find the worst state for any node in the cycle. */
1398 w = node;
1399 while (w)
1400 {
023a28e1 1401 struct cgraph_edge *e, *ie;
17b28e52 1402 funct_state w_l = get_function_state (w);
1403
ef378c27 1404 if (w_l->can_throw
1405 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
17b28e52 1406 can_throw = true;
1407
1408 if (can_throw)
1409 break;
48e1416a 1410
1411 for (e = w->callees; e; e = e->next_callee)
17b28e52 1412 {
b2c2e188 1413 enum availability avail;
1414 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
17b28e52 1415
b2c2e188 1416 if (avail > AVAIL_OVERWRITABLE)
17b28e52 1417 {
1418 funct_state y_l = get_function_state (y);
1419
48e1416a 1420 if (can_throw)
17b28e52 1421 break;
7d0d0ce1 1422 if (y_l->can_throw && !TREE_NOTHROW (w->symbol.decl)
17b28e52 1423 && e->can_throw_external)
1424 can_throw = true;
1425 }
7d0d0ce1 1426 else if (e->can_throw_external && !TREE_NOTHROW (y->symbol.decl))
ef378c27 1427 can_throw = true;
17b28e52 1428 }
023a28e1 1429 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1430 if (ie->can_throw_external)
1431 can_throw = true;
7d0d0ce1 1432 w_info = (struct ipa_dfs_info *) w->symbol.aux;
17b28e52 1433 w = w_info->next_cycle;
1434 }
1435
 1436 /* Copy back the region's nothrow state which is shared by
1437 all nodes in the region. */
1438 w = node;
1439 while (w)
1440 {
ecfab407 1441 funct_state w_l = get_function_state (w);
7d0d0ce1 1442 if (!can_throw && !TREE_NOTHROW (w->symbol.decl))
b5cebd44 1443 {
16fcf0f4 1444 cgraph_set_nothrow_flag (w, true);
b5cebd44 1445 if (dump_file)
48e1416a 1446 fprintf (dump_file, "Function found to be nothrow: %s\n",
b5cebd44 1447 cgraph_node_name (w));
f7d118a9 1448 }
7d0d0ce1 1449 else if (can_throw && !TREE_NOTHROW (w->symbol.decl))
ecfab407 1450 w_l->can_throw = true;
7d0d0ce1 1451 w_info = (struct ipa_dfs_info *) w->symbol.aux;
f7d118a9 1452 w = w_info->next_cycle;
1453 }
1454 }
1455
7771d558 1456 ipa_free_postorder_info ();
f7d118a9 1457 free (order);
c0240443 1458}
1459
1460
 1461/* Produce the global information by performing a transitive closure
1462 on the local information that was produced by generate_summary. */
1463
1464static unsigned int
1465propagate (void)
1466{
1467 struct cgraph_node *node;
1468
1469 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1470 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1471 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1472
 1473 /* Nothrow makes more functions not lead to return and improves
 1474 later analysis. */
1475 propagate_nothrow ();
1476 propagate_pure_const ();
1477
1478 /* Cleanup. */
7c455d87 1479 FOR_EACH_DEFINED_FUNCTION (node)
c0240443 1480 if (has_function_state (node))
1481 free (get_function_state (node));
86844d6c 1482 VEC_free (funct_state, heap, funct_state_vec);
cb886925 1483 finish_state ();
2a1990e9 1484 return 0;
f7d118a9 1485}
1486
1487static bool
1488gate_pure_const (void)
1489{
86844d6c 1490 return (flag_ipa_pure_const
f7d118a9 1491 /* Don't bother doing anything if the program has errors. */
852f689e 1492 && !seen_error ());
f7d118a9 1493}
1494
26dbec0a 1495struct ipa_opt_pass_d pass_ipa_pure_const =
f7d118a9 1496{
20099e35 1497 {
cb886925 1498 IPA_PASS,
d3d7bd46 1499 "pure-const", /* name */
c7875731 1500 OPTGROUP_NONE, /* optinfo_flags */
f7d118a9 1501 gate_pure_const, /* gate */
cb886925 1502 propagate, /* execute */
f7d118a9 1503 NULL, /* sub */
1504 NULL, /* next */
1505 0, /* static_pass_number */
1506 TV_IPA_PURE_CONST, /* tv_id */
1507 0, /* properties_required */
1508 0, /* properties_provided */
1509 0, /* properties_destroyed */
1510 0, /* todo_flags_start */
20099e35 1511 0 /* todo_flags_finish */
cb886925 1512 },
1513 generate_summary, /* generate_summary */
7bfefa9d 1514 pure_const_write_summary, /* write_summary */
1515 pure_const_read_summary, /* read_summary */
ddc90d88 1516 NULL, /* write_optimization_summary */
1517 NULL, /* read_optimization_summary */
90464c8b 1518 NULL, /* stmt_fixup */
cb886925 1519 0, /* TODOs */
1520 NULL, /* function_transform */
1521 NULL /* variable_transform */
f7d118a9 1522};
b5cebd44 1523
2c06958d 1524/* Return true if the function should be skipped for local pure const analysis. */
b5cebd44 1525
2c06958d 1526static bool
1527skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 1528{
b5cebd44 1529 /* Because we do not schedule pass_fixup_cfg over the whole program after early optimizations,
1530 we must not promote functions that are called by already processed functions. */
1531
1532 if (function_called_by_processed_nodes_p ())
1533 {
1534 if (dump_file)
1535 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 1536 return true;
b5cebd44 1537 }
16fcf0f4 1538 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
b5cebd44 1539 {
1540 if (dump_file)
0a10fd82 1541 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
2c06958d 1542 return true;
b5cebd44 1543 }
2c06958d 1544 return false;
1545}
1546
1547/* Simple local pass for pure const discovery reusing the analysis from
1548 ipa_pure_const. This pass is effective when executed together with
 1549 other optimization passes in the early optimization pass queue. */
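/* For example (hypothetically), when

     static int sq (int x) { return x * x; }

   gets marked const here while early optimizations walk the callgraph,
   callers processed later in the same walk can already treat calls to sq
   as const, without waiting for the whole-program propagation above.  */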
b5cebd44 1550
2c06958d 1551static unsigned int
1552local_pure_const (void)
1553{
1554 bool changed = false;
1555 funct_state l;
1556 bool skip;
1557 struct cgraph_node *node;
1558
fd6a3c41 1559 node = cgraph_get_node (current_function_decl);
2c06958d 1560 skip = skip_function_for_local_pure_const (node);
1561 if (!warn_suggest_attribute_const
1562 && !warn_suggest_attribute_pure
1563 && skip)
1564 return 0;
e0933141 1565
ab3db708 1566 l = analyze_function (node, false);
1567
1568 /* Do NORETURN discovery. */
e0933141 1569 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1570 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1571 {
43d60d64 1572 warn_function_noreturn (cfun->decl);
e0933141 1573 if (dump_file)
1574 fprintf (dump_file, "Function found to be noreturn: %s\n",
9631926a 1575 current_function_name ());
e0933141 1576
1577 /* Update declaration and reduce profile to executed once. */
1578 TREE_THIS_VOLATILE (current_function_decl) = 1;
1579 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1580 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1581
1582 changed = true;
1583 }
ef378c27 1584
b5cebd44 1585 switch (l->pure_const_state)
1586 {
1587 case IPA_CONST:
1588 if (!TREE_READONLY (current_function_decl))
1589 {
2c06958d 1590 warn_function_const (current_function_decl, !l->looping);
1591 if (!skip)
1592 {
7f74ac6b 1593 cgraph_set_const_flag (node, true, l->looping);
2c06958d 1594 changed = true;
1595 }
b5cebd44 1596 if (dump_file)
1597 fprintf (dump_file, "Function found to be %sconst: %s\n",
1598 l->looping ? "looping " : "",
9631926a 1599 current_function_name ());
b5cebd44 1600 }
1601 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1602 && !l->looping)
1603 {
2c06958d 1604 if (!skip)
1605 {
7f74ac6b 1606 cgraph_set_const_flag (node, true, false);
2c06958d 1607 changed = true;
1608 }
b5cebd44 1609 if (dump_file)
1610 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1611 current_function_name ());
b5cebd44 1612 }
1613 break;
1614
1615 case IPA_PURE:
2c06958d 1616 if (!DECL_PURE_P (current_function_decl))
b5cebd44 1617 {
2c06958d 1618 if (!skip)
1619 {
7f74ac6b 1620 cgraph_set_pure_flag (node, true, l->looping);
2c06958d 1621 changed = true;
1622 }
1623 warn_function_pure (current_function_decl, !l->looping);
b5cebd44 1624 if (dump_file)
1625 fprintf (dump_file, "Function found to be %spure: %s\n",
1626 l->looping ? "looping " : "",
9631926a 1627 current_function_name ());
b5cebd44 1628 }
1629 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1630 && !l->looping)
1631 {
2c06958d 1632 if (!skip)
1633 {
7f74ac6b 1634 cgraph_set_pure_flag (node, true, false);
2c06958d 1635 changed = true;
1636 }
b5cebd44 1637 if (dump_file)
1638 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1639 current_function_name ());
b5cebd44 1640 }
1641 break;
1642
1643 default:
1644 break;
1645 }
1646 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1647 {
16fcf0f4 1648 cgraph_set_nothrow_flag (node, true);
b5cebd44 1649 changed = true;
1650 if (dump_file)
1651 fprintf (dump_file, "Function found to be nothrow: %s\n",
9631926a 1652 current_function_name ());
b5cebd44 1653 }
dd045aee 1654 free (l);
b5cebd44 1655 if (changed)
1656 return execute_fixup_cfg ();
1657 else
1658 return 0;
1659}
1660
1661struct gimple_opt_pass pass_local_pure_const =
1662{
1663 {
1664 GIMPLE_PASS,
1665 "local-pure-const", /* name */
c7875731 1666 OPTGROUP_NONE, /* optinfo_flags */
b5cebd44 1667 gate_pure_const, /* gate */
1668 local_pure_const, /* execute */
1669 NULL, /* sub */
1670 NULL, /* next */
1671 0, /* static_pass_number */
1672 TV_IPA_PURE_CONST, /* tv_id */
1673 0, /* properties_required */
1674 0, /* properties_provided */
1675 0, /* properties_destroyed */
1676 0, /* todo_flags_start */
1677 0 /* todo_flags_finish */
1678 }
1679};