gcc/ipa-pure-const.c
f7d118a9 1/* Callgraph based analysis of static variables.
71e45bc2 2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
7cf0dbf3 3 Free Software Foundation, Inc.
f7d118a9 4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
f7d118a9 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
f7d118a9 21
f0b5f617 22/* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 25
 26 This must be run after inlining decisions have been made, since
 27 otherwise the local sets will not contain information that is
 28 consistent with the post-inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
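
/* For illustration only (these functions are not part of GCC itself):
   a const function depends only on its arguments, a pure function may
   in addition read global memory, and a function that writes global
   memory or has other side effects is neither, e.g.

     static int counter;
     int sq (int x) { return x * x; }         const
     int peek (void) { return counter; }      pure
     void bump (void) { counter++; }          neither  */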
30
31/* The code in this module is called by the ipa pass manager. It
 32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
34
35#include "config.h"
36#include "system.h"
37#include "coretypes.h"
38#include "tm.h"
39#include "tree.h"
40#include "tree-flow.h"
41#include "tree-inline.h"
42#include "tree-pass.h"
43#include "langhooks.h"
44#include "pointer-set.h"
45#include "ggc.h"
46#include "ipa-utils.h"
75a70cf9 47#include "gimple.h"
f7d118a9 48#include "cgraph.h"
f7d118a9 49#include "flags.h"
f7d118a9 50#include "diagnostic.h"
ce084dfc 51#include "gimple-pretty-print.h"
f7d118a9 52#include "langhooks.h"
959fce68 53#include "target.h"
7bfefa9d 54#include "lto-streamer.h"
2541503d 55#include "data-streamer.h"
56#include "tree-streamer.h"
c9263b6a 57#include "cfgloop.h"
58#include "tree-scalar-evolution.h"
2c06958d 59#include "intl.h"
60#include "opts.h"
f7d118a9 61
62static struct pointer_set_t *visited_nodes;
63
64/* Lattice values for const and pure functions. Everything starts out
65 being const, then may drop to pure and then neither depending on
66 what is found. */
67enum pure_const_state_e
68{
69 IPA_CONST,
70 IPA_PURE,
71 IPA_NEITHER
72};
73
fc94a528 74const char *pure_const_names[3] = {"const", "pure", "neither"};
75
cb886925 76/* Holder for the const_state. There is one of these per function
77 decl. */
48e1416a 78struct funct_state_d
f7d118a9 79{
cb886925 80 /* See above. */
f7d118a9 81 enum pure_const_state_e pure_const_state;
b5cebd44 82 /* What the user set here; we can always be sure about this. */
48e1416a 83 enum pure_const_state_e state_previously_known;
84 bool looping_previously_known;
cb886925 85
86 /* True if the function could possibly infinite loop. There are a
87 lot of ways that this could be determined. We are pretty
88 conservative here. While it is possible to cse pure and const
89 calls, it is not legal to have dce get rid of the call if there
90 is a possibility that the call could infinite loop since this is
91 a behavioral change. */
9c2a0c05 92 bool looping;
cb886925 93
b5cebd44 94 bool can_throw;
f7d118a9 95};
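
/* A rough example of why LOOPING matters; given a const function such as

     int spin (int x) { while (x) ; return 0; }

   two identical calls may still be CSEd, but a call whose result is
   unused must not be deleted by DCE, because removing it would also
   remove the possible infinite loop. */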
96
db86e424 97/* State used when we know nothing about the function. */
98static struct funct_state_d varying_state
99 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
100
101
f7d118a9 102typedef struct funct_state_d * funct_state;
103
cb886925 104/* The storage of the funct_state is abstracted because there is the
105 possibility that it may be desirable to move this to the cgraph
48e1416a 106 local info. */
cb886925 107
108/* Array, indexed by cgraph node uid, of function states. */
109
f1f41a6c 110static vec<funct_state> funct_state_vec;
cb886925 111
50828ed8 112/* Holders of ipa cgraph hooks: */
113static struct cgraph_node_hook_list *function_insertion_hook_holder;
86844d6c 114static struct cgraph_2node_hook_list *node_duplication_hook_holder;
115static struct cgraph_node_hook_list *node_removal_hook_holder;
cb886925 116
2c06958d 117/* Try to guess if the function body will always be visible to the
 118 compiler when compiling the call and whether the compiler will be
 119 able to propagate the information by itself. */
120
121static bool
122function_always_visible_to_compiler_p (tree decl)
123{
124 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
125}
126
127/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
128 is true if the function is known to be finite. The diagnostic is
129 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
 130 OPTION; this function may initialize it, and it is always returned
131 by the function. */
132
133static struct pointer_set_t *
134suggest_attribute (int option, tree decl, bool known_finite,
135 struct pointer_set_t *warned_about,
136 const char * attrib_name)
137{
2c5d2e39 138 if (!option_enabled (option, &global_options))
2c06958d 139 return warned_about;
140 if (TREE_THIS_VOLATILE (decl)
141 || (known_finite && function_always_visible_to_compiler_p (decl)))
142 return warned_about;
143
144 if (!warned_about)
145 warned_about = pointer_set_create ();
146 if (pointer_set_contains (warned_about, decl))
147 return warned_about;
148 pointer_set_insert (warned_about, decl);
149 warning_at (DECL_SOURCE_LOCATION (decl),
150 option,
151 known_finite
152 ? _("function might be candidate for attribute %<%s%>")
153 : _("function might be candidate for attribute %<%s%>"
154 " if it is known to return normally"), attrib_name);
155 return warned_about;
156}
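
/* For example, in a hypothetical translation unit containing

     int twice (int x) { return 2 * x; }

   compiling with -Wsuggest-attribute=const should produce a warning
   along the lines of "function might be candidate for attribute
   'const'", assuming the function is not already declared const. */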
157
 158/* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
159 is true if the function is known to be finite. */
160
161static void
162warn_function_pure (tree decl, bool known_finite)
163{
164 static struct pointer_set_t *warned_about;
165
166 warned_about
167 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
168 known_finite, warned_about, "pure");
169}
170
 171/* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
172 is true if the function is known to be finite. */
173
174static void
175warn_function_const (tree decl, bool known_finite)
176{
177 static struct pointer_set_t *warned_about;
178 warned_about
179 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
180 known_finite, warned_about, "const");
181}
43d60d64 182
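/* Emit suggestion about __attribute__((noreturn)) for DECL. */
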
183void
184warn_function_noreturn (tree decl)
185{
186 static struct pointer_set_t *warned_about;
08c6cbd2 187 if (!lang_hooks.missing_noreturn_ok_p (decl)
188 && targetm.warn_func_return (decl))
43d60d64 189 warned_about
190 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
191 true, warned_about, "noreturn");
192}
9631926a 193
cb886925 194/* Free the function state vector. */
195
196static void
197finish_state (void)
198{
f1f41a6c 199 funct_state_vec.release ();
cb886925 200}
201
202
1ee39ad1 203/* Return true if we have a function state for NODE. */
204
205static inline bool
206has_function_state (struct cgraph_node *node)
207{
f1f41a6c 208 if (!funct_state_vec.exists ()
209 || funct_state_vec.length () <= (unsigned int)node->uid)
1ee39ad1 210 return false;
f1f41a6c 211 return funct_state_vec[node->uid] != NULL;
1ee39ad1 212}
213
48e1416a 214/* Return the function state from NODE. */
f7d118a9 215
216static inline funct_state
217get_function_state (struct cgraph_node *node)
218{
f1f41a6c 219 if (!funct_state_vec.exists ()
220 || funct_state_vec.length () <= (unsigned int)node->uid
221 || !funct_state_vec[node->uid])
1ee39ad1 222 /* We might want to put the correct previously_known state into varying. */
db86e424 223 return &varying_state;
f1f41a6c 224 return funct_state_vec[node->uid];
cb886925 225}
226
227/* Set the function state S for NODE. */
228
229static inline void
230set_function_state (struct cgraph_node *node, funct_state s)
231{
f1f41a6c 232 if (!funct_state_vec.exists ()
233 || funct_state_vec.length () <= (unsigned int)node->uid)
234 funct_state_vec.safe_grow_cleared (node->uid + 1);
235 funct_state_vec[node->uid] = s;
f7d118a9 236}
237
f0b5f617 238/* Check to see if the use (or definition when CHECKING_WRITE is true)
f7d118a9 239 of variable T is legal in a function that is either pure or const. */
240
48e1416a 241static inline void
242check_decl (funct_state local,
023a28e1 243 tree t, bool checking_write, bool ipa)
f7d118a9 244{
f7d118a9 245 /* Do not want to do anything with volatile except mark any
 246 function that uses one as neither const nor pure. */
48e1416a 247 if (TREE_THIS_VOLATILE (t))
248 {
f7d118a9 249 local->pure_const_state = IPA_NEITHER;
b5cebd44 250 if (dump_file)
251 fprintf (dump_file, " Volatile operand is not const/pure");
f7d118a9 252 return;
253 }
254
255 /* Do not care about a local automatic that is not static. */
256 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
257 return;
258
b5cebd44 259 /* If the variable has the "used" attribute, treat it as if it had
260 been touched by the devil. */
83a23b05 261 if (DECL_PRESERVE_P (t))
b5cebd44 262 {
263 local->pure_const_state = IPA_NEITHER;
264 if (dump_file)
265 fprintf (dump_file, " Used static/global variable is not const/pure\n");
266 return;
267 }
268
023a28e1 269 /* In IPA mode we are not interested in checking actual loads and stores;
270 they will be processed at propagation time using ipa_ref. */
271 if (ipa)
272 return;
273
f7d118a9 274 /* Since we have dealt with the locals and params cases above, if we
275 are CHECKING_WRITE, this cannot be a pure or constant
276 function. */
48e1416a 277 if (checking_write)
bc17fd99 278 {
279 local->pure_const_state = IPA_NEITHER;
b5cebd44 280 if (dump_file)
281 fprintf (dump_file, " static/global memory write is not const/pure\n");
bc17fd99 282 return;
283 }
f7d118a9 284
285 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
286 {
b5cebd44 287 /* Readonly reads are safe. */
288 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
289 return; /* Read of a constant, do not change the function state. */
48e1416a 290 else
f7d118a9 291 {
b5cebd44 292 if (dump_file)
293 fprintf (dump_file, " global memory read is not const\n");
f7d118a9 294 /* Just a regular read. */
295 if (local->pure_const_state == IPA_CONST)
296 local->pure_const_state = IPA_PURE;
297 }
298 }
b5cebd44 299 else
300 {
301 /* Compilation level statics can be read if they are readonly
302 variables. */
303 if (TREE_READONLY (t))
304 return;
305
306 if (dump_file)
307 fprintf (dump_file, " static memory read is not const\n");
308 /* Just a regular read. */
309 if (local->pure_const_state == IPA_CONST)
310 local->pure_const_state = IPA_PURE;
311 }
f7d118a9 312}
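
/* A sketch of how check_decl above moves the lattice during local
   (non-IPA) analysis, for some hypothetical declarations:

     volatile int v; int g; static const int c = 1;

     any use of v      -> IPA_NEITHER
     store to g        -> IPA_NEITHER
     load from g       -> IPA_CONST drops to IPA_PURE
     load from c       -> state unchanged (readonly)  */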
313
f7d118a9 314
b5cebd44 315/* Check to see if the use (or definition when CHECKING_WRITE is true)
 316 of operand T is legal in a function that is either pure or const. */
f7d118a9 317
48e1416a 318static inline void
5ed0b345 319check_op (funct_state local, tree t, bool checking_write)
f7d118a9 320{
3ae61172 321 t = get_base_address (t);
322 if (t && TREE_THIS_VOLATILE (t))
f7d118a9 323 {
5ed0b345 324 local->pure_const_state = IPA_NEITHER;
325 if (dump_file)
326 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
327 return;
328 }
3ae61172 329 else if (t
182cf5a9 330 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
3ae61172 331 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
332 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
333 {
334 if (dump_file)
335 fprintf (dump_file, " Indirect ref to local memory is OK\n");
336 return;
337 }
5ed0b345 338 else if (checking_write)
339 {
340 local->pure_const_state = IPA_NEITHER;
341 if (dump_file)
342 fprintf (dump_file, " Indirect ref write is not const/pure\n");
343 return;
344 }
345 else
346 {
347 if (dump_file)
348 fprintf (dump_file, " Indirect ref read is not const\n");
349 if (local->pure_const_state == IPA_CONST)
350 local->pure_const_state = IPA_PURE;
f7d118a9 351 }
352}
353
023a28e1 354/* Compute state based on ECF FLAGS and store to STATE and LOOPING. */
355
356static void
357state_from_flags (enum pure_const_state_e *state, bool *looping,
358 int flags, bool cannot_lead_to_return)
359{
360 *looping = false;
361 if (flags & ECF_LOOPING_CONST_OR_PURE)
362 {
363 *looping = true;
364 if (dump_file && (dump_flags & TDF_DETAILS))
365 fprintf (dump_file, " looping");
366 }
367 if (flags & ECF_CONST)
368 {
369 *state = IPA_CONST;
370 if (dump_file && (dump_flags & TDF_DETAILS))
371 fprintf (dump_file, " const\n");
372 }
373 else if (flags & ECF_PURE)
374 {
375 *state = IPA_PURE;
376 if (dump_file && (dump_flags & TDF_DETAILS))
377 fprintf (dump_file, " pure\n");
378 }
379 else if (cannot_lead_to_return)
380 {
381 *state = IPA_PURE;
382 *looping = true;
383 if (dump_file && (dump_flags & TDF_DETAILS))
384 fprintf (dump_file, " ignoring side effects->pure looping\n");
385 }
386 else
387 {
388 if (dump_file && (dump_flags & TDF_DETAILS))
9631926a 389 fprintf (dump_file, " neither\n");
023a28e1 390 *state = IPA_NEITHER;
391 *looping = true;
392 }
393}
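
/* For instance, ECF_CONST | ECF_LOOPING_CONST_OR_PURE yields IPA_CONST
   with LOOPING set, while a call with no const/pure flags that cannot
   lead to a return is still treated as pure looping, because its side
   effects can never be observed by a caller that returns. */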
394
395/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
 396 the better of the two variants into STATE and LOOPING.
397 Be sure to merge looping correctly. IPA_NEITHER functions
398 have looping 0 even if they don't have to return. */
399
400static inline void
401better_state (enum pure_const_state_e *state, bool *looping,
402 enum pure_const_state_e state2, bool looping2)
403{
404 if (state2 < *state)
405 {
406 if (*state == IPA_NEITHER)
407 *looping = looping2;
408 else
409 *looping = MIN (*looping, looping2);
410 }
411 else if (state2 != IPA_NEITHER)
412 *looping = MIN (*looping, looping2);
413}
414
415/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
 416 the worse of the two variants into STATE and LOOPING. */
417
418static inline void
419worse_state (enum pure_const_state_e *state, bool *looping,
420 enum pure_const_state_e state2, bool looping2)
421{
422 *state = MAX (*state, state2);
423 *looping = MAX (*looping, looping2);
424}
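
/* A small worked example of worse_state, using the lattice order
   IPA_CONST < IPA_PURE < IPA_NEITHER: merging IPA_CONST/non-looping
   with IPA_PURE/looping gives IPA_PURE/looping, i.e. the result can
   only move towards IPA_NEITHER. */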
425
0a10fd82 426/* Recognize special cases of builtins that are by themselves not pure or const
7dd42908 427 but function using them is. */
428static bool
a53e7471 429special_builtin_state (enum pure_const_state_e *state, bool *looping,
7dd42908 430 tree callee)
431{
432 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
433 switch (DECL_FUNCTION_CODE (callee))
434 {
435 case BUILT_IN_RETURN:
436 case BUILT_IN_UNREACHABLE:
437 case BUILT_IN_ALLOCA:
581bf1c2 438 case BUILT_IN_ALLOCA_WITH_ALIGN:
7dd42908 439 case BUILT_IN_STACK_SAVE:
440 case BUILT_IN_STACK_RESTORE:
441 case BUILT_IN_EH_POINTER:
442 case BUILT_IN_EH_FILTER:
443 case BUILT_IN_UNWIND_RESUME:
444 case BUILT_IN_CXA_END_CLEANUP:
445 case BUILT_IN_EH_COPY_VALUES:
446 case BUILT_IN_FRAME_ADDRESS:
447 case BUILT_IN_APPLY:
448 case BUILT_IN_APPLY_ARGS:
7dd42908 449 *looping = false;
450 *state = IPA_CONST;
451 return true;
452 case BUILT_IN_PREFETCH:
453 *looping = true;
454 *state = IPA_CONST;
455 return true;
456 }
457 return false;
458}
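
/* A hypothetical example of the special casing above:

     int with_alloca (int n) { char *p = __builtin_alloca (n); return n; }

   __builtin_alloca is not const by itself, yet treating the call as
   IPA_CONST here lets the enclosing function still be recognized as
   const, assuming nothing else in its body touches global memory. */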
459
f7d118a9 460/* Check the parameters of a function call to CALL_EXPR to see if
461 there are any references in the parameters that are not allowed for
462 pure or const functions. Also check to see if this is either an
 463 indirect call, a call outside the compilation unit, or a call that
 464 has special attributes that may also affect the purity. CALL is
 465 the call statement being checked. */
466
467static void
b5cebd44 468check_call (funct_state local, gimple call, bool ipa)
f7d118a9 469{
75a70cf9 470 int flags = gimple_call_flags (call);
b5cebd44 471 tree callee_t = gimple_call_fndecl (call);
b5cebd44 472 bool possibly_throws = stmt_could_throw_p (call);
473 bool possibly_throws_externally = (possibly_throws
474 && stmt_can_throw_external (call));
f7d118a9 475
b5cebd44 476 if (possibly_throws)
477 {
478 unsigned int i;
479 for (i = 0; i < gimple_num_ops (call); i++)
480 if (gimple_op (call, i)
481 && tree_could_throw_p (gimple_op (call, i)))
482 {
cbeb677e 483 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 484 {
485 if (dump_file)
486 fprintf (dump_file, " operand can throw; looping\n");
487 local->looping = true;
488 }
489 if (possibly_throws_externally)
490 {
491 if (dump_file)
492 fprintf (dump_file, " operand can throw externally\n");
493 local->can_throw = true;
494 }
495 }
496 }
48e1416a 497
f7d118a9 498 /* The const and pure flags are set by a variety of places in the
499 compiler (including here). If someone has already set the flags
500 for the callee, (such as for some of the builtins) we will use
48e1416a 501 them, otherwise we will compute our own information.
502
f7d118a9 503 Const and pure functions have less clobber effects than other
504 functions so we process these first. Otherwise if it is a call
505 outside the compilation unit or an indirect call we punt. This
506 leaves local calls which will be processed by following the call
48e1416a 507 graph. */
f7d118a9 508 if (callee_t)
509 {
7dd42908 510 enum pure_const_state_e call_state;
511 bool call_looping;
512
a53e7471 513 if (special_builtin_state (&call_state, &call_looping, callee_t))
7dd42908 514 {
515 worse_state (&local->pure_const_state, &local->looping,
516 call_state, call_looping);
517 return;
518 }
f7d118a9 519 /* When bad things happen to bad functions, they cannot be const
520 or pure. */
521 if (setjmp_call_p (callee_t))
9c2a0c05 522 {
b5cebd44 523 if (dump_file)
524 fprintf (dump_file, " setjmp is not const/pure\n");
525 local->looping = true;
9c2a0c05 526 local->pure_const_state = IPA_NEITHER;
9c2a0c05 527 }
f7d118a9 528
529 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
530 switch (DECL_FUNCTION_CODE (callee_t))
531 {
532 case BUILT_IN_LONGJMP:
533 case BUILT_IN_NONLOCAL_GOTO:
b5cebd44 534 if (dump_file)
535 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
f7d118a9 536 local->pure_const_state = IPA_NEITHER;
b5cebd44 537 local->looping = true;
f7d118a9 538 break;
539 default:
540 break;
541 }
542 }
543
b5cebd44 544 /* When not in IPA mode, we can still handle self recursion. */
545 if (!ipa && callee_t == current_function_decl)
2c06958d 546 {
547 if (dump_file)
548 fprintf (dump_file, " Recursive call can loop.\n");
549 local->looping = true;
550 }
0a10fd82 551 /* Either the callee is unknown or we are doing local analysis.
ef378c27 552 Look to see if there are any bits available for the callee (such as by
553 declaration or because it is builtin) and process solely on the basis of
554 those bits. */
023a28e1 555 else if (!ipa)
f7d118a9 556 {
023a28e1 557 enum pure_const_state_e call_state;
558 bool call_looping;
cbeb677e 559 if (possibly_throws && cfun->can_throw_non_call_exceptions)
b5cebd44 560 {
561 if (dump_file)
562 fprintf (dump_file, " can throw; looping\n");
563 local->looping = true;
564 }
565 if (possibly_throws_externally)
566 {
567 if (dump_file)
568 {
e38def9c 569 fprintf (dump_file, " can throw externally to lp %i\n",
570 lookup_stmt_eh_lp (call));
b5cebd44 571 if (callee_t)
572 fprintf (dump_file, " callee:%s\n",
573 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
574 }
575 local->can_throw = true;
576 }
023a28e1 577 if (dump_file && (dump_flags & TDF_DETAILS))
578 fprintf (dump_file, " checking flags for call:");
579 state_from_flags (&call_state, &call_looping, flags,
580 ((flags & (ECF_NORETURN | ECF_NOTHROW))
581 == (ECF_NORETURN | ECF_NOTHROW))
582 || (!flag_exceptions && (flags & ECF_NORETURN)));
583 worse_state (&local->pure_const_state, &local->looping,
584 call_state, call_looping);
f7d118a9 585 }
b5cebd44 586 /* Direct function calls are handled by IPA propagation. */
f7d118a9 587}
588
023a28e1 589/* Wrapper around check_decl for loads in local mode. */
5ed0b345 590
591static bool
592check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
593{
594 if (DECL_P (op))
023a28e1 595 check_decl ((funct_state)data, op, false, false);
5ed0b345 596 else
597 check_op ((funct_state)data, op, false);
598 return false;
599}
600
023a28e1 601/* Wrapper around check_decl for stores in local mode. */
5ed0b345 602
603static bool
604check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
605{
606 if (DECL_P (op))
023a28e1 607 check_decl ((funct_state)data, op, true, false);
608 else
609 check_op ((funct_state)data, op, true);
610 return false;
611}
612
613/* Wrapper around check_decl for loads in ipa mode. */
614
615static bool
616check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
617{
618 if (DECL_P (op))
619 check_decl ((funct_state)data, op, false, true);
620 else
621 check_op ((funct_state)data, op, false);
622 return false;
623}
624
625/* Wrapper around check_decl for stores in ipa mode. */
626
627static bool
628check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
629{
630 if (DECL_P (op))
631 check_decl ((funct_state)data, op, true, true);
5ed0b345 632 else
633 check_op ((funct_state)data, op, true);
634 return false;
635}
636
dd277d48 637/* Look at the statement pointed to by GSIP and figure out what interesting side
638 effects it has. */
b5cebd44 639static void
640check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
f7d118a9 641{
b5cebd44 642 gimple stmt = gsi_stmt (*gsip);
f7d118a9 643
9845d120 644 if (is_gimple_debug (stmt))
645 return;
646
b5cebd44 647 if (dump_file)
f7d118a9 648 {
b5cebd44 649 fprintf (dump_file, " scanning: ");
650 print_gimple_stmt (dump_file, stmt, 0, 0);
651 }
dd277d48 652
541c6dbb 653 if (gimple_has_volatile_ops (stmt)
654 && !gimple_clobber_p (stmt))
4b68df2e 655 {
656 local->pure_const_state = IPA_NEITHER;
657 if (dump_file)
658 fprintf (dump_file, " Volatile stmt is not const/pure\n");
659 }
660
5ed0b345 661 /* Look for loads and stores. */
023a28e1 662 walk_stmt_load_store_ops (stmt, local,
663 ipa ? check_ipa_load : check_load,
664 ipa ? check_ipa_store : check_store);
b5cebd44 665
666 if (gimple_code (stmt) != GIMPLE_CALL
667 && stmt_could_throw_p (stmt))
668 {
cbeb677e 669 if (cfun->can_throw_non_call_exceptions)
b5cebd44 670 {
671 if (dump_file)
10f4615f 672 fprintf (dump_file, " can throw; looping\n");
b5cebd44 673 local->looping = true;
674 }
675 if (stmt_can_throw_external (stmt))
676 {
677 if (dump_file)
10f4615f 678 fprintf (dump_file, " can throw externally\n");
b5cebd44 679 local->can_throw = true;
680 }
10f4615f 681 else
682 if (dump_file)
683 fprintf (dump_file, " can throw\n");
75a70cf9 684 }
75a70cf9 685 switch (gimple_code (stmt))
686 {
b5cebd44 687 case GIMPLE_CALL:
b5cebd44 688 check_call (local, stmt, ipa);
f7d118a9 689 break;
75a70cf9 690 case GIMPLE_LABEL:
691 if (DECL_NONLOCAL (gimple_label_label (stmt)))
f7d118a9 692 /* Target of long jump. */
9c2a0c05 693 {
b5cebd44 694 if (dump_file)
10f4615f 695 fprintf (dump_file, " nonlocal label is not const/pure\n");
9c2a0c05 696 local->pure_const_state = IPA_NEITHER;
9c2a0c05 697 }
f7d118a9 698 break;
75a70cf9 699 case GIMPLE_ASM:
97cf41ec 700 if (gimple_asm_clobbers_memory_p (stmt))
b5cebd44 701 {
97cf41ec 702 if (dump_file)
10f4615f 703 fprintf (dump_file, " memory asm clobber is not const/pure\n");
97cf41ec 704 /* Abandon all hope, ye who enter here. */
705 local->pure_const_state = IPA_NEITHER;
b5cebd44 706 }
707 if (gimple_asm_volatile_p (stmt))
708 {
709 if (dump_file)
10f4615f 710 fprintf (dump_file, " volatile is not const/pure\n");
b5cebd44 711 /* Abandon all hope, ye who enter here. */
712 local->pure_const_state = IPA_NEITHER;
713 local->looping = true;
714 }
715 return;
f7d118a9 716 default:
717 break;
718 }
f7d118a9 719}
720
cb886925 721
f7d118a9 722/* This is the main routine for finding the reference patterns for
723 global variables within a function FN. */
724
b5cebd44 725static funct_state
726analyze_function (struct cgraph_node *fn, bool ipa)
f7d118a9 727{
7d0d0ce1 728 tree decl = fn->symbol.decl;
b5cebd44 729 funct_state l;
730 basic_block this_block;
f7d118a9 731
b5cebd44 732 l = XCNEW (struct funct_state_d);
733 l->pure_const_state = IPA_CONST;
df9b545b 734 l->state_previously_known = IPA_NEITHER;
735 l->looping_previously_known = true;
b5cebd44 736 l->looping = false;
737 l->can_throw = false;
91bf9d9a 738 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
7d0d0ce1 739 flags_from_decl_or_type (fn->symbol.decl),
91bf9d9a 740 cgraph_node_cannot_return (fn));
741
b2c2e188 742 if (fn->thunk.thunk_p || fn->alias)
91bf9d9a 743 {
744 /* Thunk gets propagated through, so nothing interesting happens. */
745 gcc_assert (ipa);
746 return l;
747 }
f7d118a9 748
749 if (dump_file)
750 {
48e1416a 751 fprintf (dump_file, "\n\n local analysis of %s\n ",
b5cebd44 752 cgraph_node_name (fn));
f7d118a9 753 }
48e1416a 754
b5cebd44 755 push_cfun (DECL_STRUCT_FUNCTION (decl));
48e1416a 756
b5cebd44 757 FOR_EACH_BB (this_block)
f7d118a9 758 {
b5cebd44 759 gimple_stmt_iterator gsi;
760 struct walk_stmt_info wi;
f7d118a9 761
b5cebd44 762 memset (&wi, 0, sizeof(wi));
763 for (gsi = gsi_start_bb (this_block);
764 !gsi_end_p (gsi);
765 gsi_next (&gsi))
f7d118a9 766 {
b5cebd44 767 check_stmt (&gsi, l, ipa);
768 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
769 goto end;
f7d118a9 770 }
771 }
dcccac3e 772
773end:
b5cebd44 774 if (l->pure_const_state != IPA_NEITHER)
775 {
 776 /* Const functions cannot have back edges (an
 777 indication of a possible infinite loop side
 778 effect). */
779 if (mark_dfs_back_edges ())
c9263b6a 780 {
b1887855 781 /* Preheaders are needed for SCEV to work.
0a10fd82 782 Simple latches and recorded exits improve chances that the loop
b1887855 783 will be proved finite in testcases such as loop-15.c and loop-24.c. */
784 loop_optimizer_init (LOOPS_NORMAL
785 | LOOPS_HAVE_RECORDED_EXITS);
c9263b6a 786 if (dump_file && (dump_flags & TDF_DETAILS))
787 flow_loops_dump (dump_file, NULL, 0);
788 if (mark_irreducible_loops ())
789 {
790 if (dump_file)
791 fprintf (dump_file, " has irreducible loops\n");
792 l->looping = true;
793 }
48e1416a 794 else
c9263b6a 795 {
796 loop_iterator li;
797 struct loop *loop;
798 scev_initialize ();
799 FOR_EACH_LOOP (li, loop, 0)
800 if (!finite_loop_p (loop))
801 {
802 if (dump_file)
 803 fprintf (dump_file, " cannot prove finiteness of loop %i\n", loop->num);
 804 l->looping = true;
83b709f2 805 FOR_EACH_LOOP_BREAK (li);
c9263b6a 806 }
807 scev_finalize ();
808 }
809 loop_optimizer_finalize ();
810 }
b5cebd44 811 }
812
023a28e1 813 if (dump_file && (dump_flags & TDF_DETAILS))
814 fprintf (dump_file, " checking previously known:");
023a28e1 815
816 better_state (&l->pure_const_state, &l->looping,
817 l->state_previously_known,
818 l->looping_previously_known);
b5cebd44 819 if (TREE_NOTHROW (decl))
820 l->can_throw = false;
821
822 pop_cfun ();
dcccac3e 823 if (dump_file)
824 {
b5cebd44 825 if (l->looping)
826 fprintf (dump_file, "Function is locally looping.\n");
827 if (l->can_throw)
828 fprintf (dump_file, "Function is locally throwing.\n");
829 if (l->pure_const_state == IPA_CONST)
830 fprintf (dump_file, "Function is locally const.\n");
831 if (l->pure_const_state == IPA_PURE)
832 fprintf (dump_file, "Function is locally pure.\n");
dcccac3e 833 }
b5cebd44 834 return l;
f7d118a9 835}
836
50828ed8 837/* Called when a new function is inserted into the callgraph late. */
838static void
839add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
840{
ef378c27 841 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
86844d6c 842 return;
50828ed8 843 /* There are some shared nodes, in particular the initializers on
844 static declarations. We do not need to scan them more than once
845 since all we would be interested in are the addressof
846 operations. */
847 visited_nodes = pointer_set_create ();
2c06958d 848 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
849 set_function_state (node, analyze_function (node, true));
50828ed8 850 pointer_set_destroy (visited_nodes);
851 visited_nodes = NULL;
852}
853
86844d6c 854/* Called when a new clone is inserted into the callgraph late. */
855
856static void
857duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
858 void *data ATTRIBUTE_UNUSED)
859{
1ee39ad1 860 if (has_function_state (src))
86844d6c 861 {
862 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 863 gcc_assert (!has_function_state (dst));
86844d6c 864 memcpy (l, get_function_state (src), sizeof (*l));
865 set_function_state (dst, l);
866 }
867}
868
 869/* Called when a node is removed from the callgraph. */
870
871static void
872remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
873{
1ee39ad1 874 if (has_function_state (node))
86844d6c 875 {
db86e424 876 funct_state l = get_function_state (node);
877 if (l != &varying_state)
878 free (l);
86844d6c 879 set_function_state (node, NULL);
880 }
881}
882
f7d118a9 883\f
7bfefa9d 884static void
885register_hooks (void)
f7d118a9 886{
7bfefa9d 887 static bool init_p = false;
888
889 if (init_p)
890 return;
891
892 init_p = true;
f7d118a9 893
86844d6c 894 node_removal_hook_holder =
895 cgraph_add_node_removal_hook (&remove_node_data, NULL);
896 node_duplication_hook_holder =
897 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
50828ed8 898 function_insertion_hook_holder =
899 cgraph_add_function_insertion_hook (&add_new_function, NULL);
7bfefa9d 900}
901
902
903/* Analyze each function in the cgraph to see if it is locally PURE or
904 CONST. */
905
48e1416a 906static void
7bfefa9d 907generate_summary (void)
908{
909 struct cgraph_node *node;
910
911 register_hooks ();
912
f7d118a9 913 /* There are some shared nodes, in particular the initializers on
914 static declarations. We do not need to scan them more than once
915 since all we would be interested in are the addressof
916 operations. */
917 visited_nodes = pointer_set_create ();
918
48e1416a 919 /* Process all of the functions.
f7d118a9 920
ef378c27 921 We process AVAIL_OVERWRITABLE functions. We cannot use the results
 922 by default, but the info can be used at LTO with -fwhole-program or
 923 when the function gets cloned and the clone is AVAILABLE. */
ef378c27 924
7c455d87 925 FOR_EACH_DEFINED_FUNCTION (node)
ef378c27 926 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
b5cebd44 927 set_function_state (node, analyze_function (node, true));
f7d118a9 928
929 pointer_set_destroy (visited_nodes);
930 visited_nodes = NULL;
cb886925 931}
932
7bfefa9d 933
934/* Serialize the ipa info for lto. */
935
936static void
eab36a5a 937pure_const_write_summary (void)
7bfefa9d 938{
939 struct cgraph_node *node;
940 struct lto_simple_output_block *ob
941 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
942 unsigned int count = 0;
eab36a5a 943 lto_symtab_encoder_iterator lsei;
944 lto_symtab_encoder_t encoder;
7bfefa9d 945
eab36a5a 946 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
947
948 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
949 lsei_next_function_in_partition (&lsei))
7bfefa9d 950 {
eab36a5a 951 node = lsei_cgraph_node (lsei);
1ee39ad1 952 if (node->analyzed && has_function_state (node))
7bfefa9d 953 count++;
954 }
48e1416a 955
7f385784 956 streamer_write_uhwi_stream (ob->main_stream, count);
48e1416a 957
7bfefa9d 958 /* Process all of the functions. */
eab36a5a 959 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
960 lsei_next_function_in_partition (&lsei))
7bfefa9d 961 {
eab36a5a 962 node = lsei_cgraph_node (lsei);
1ee39ad1 963 if (node->analyzed && has_function_state (node))
7bfefa9d 964 {
30baba90 965 struct bitpack_d bp;
7bfefa9d 966 funct_state fs;
967 int node_ref;
70225339 968 lto_symtab_encoder_t encoder;
48e1416a 969
7bfefa9d 970 fs = get_function_state (node);
971
70225339 972 encoder = ob->decl_state->symtab_node_encoder;
973 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node)node);
7f385784 974 streamer_write_uhwi_stream (ob->main_stream, node_ref);
48e1416a 975
7bfefa9d 976 /* Note that flags will need to be read in the opposite
977 order as we are pushing the bitflags into FLAGS. */
30baba90 978 bp = bitpack_create (ob->main_stream);
979 bp_pack_value (&bp, fs->pure_const_state, 2);
980 bp_pack_value (&bp, fs->state_previously_known, 2);
981 bp_pack_value (&bp, fs->looping_previously_known, 1);
982 bp_pack_value (&bp, fs->looping, 1);
983 bp_pack_value (&bp, fs->can_throw, 1);
7f385784 984 streamer_write_bitpack (&bp);
7bfefa9d 985 }
986 }
987
988 lto_destroy_simple_output_block (ob);
989}
990
991
992/* Deserialize the ipa info for lto. */
993
48e1416a 994static void
7bfefa9d 995pure_const_read_summary (void)
996{
997 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
998 struct lto_file_decl_data *file_data;
999 unsigned int j = 0;
1000
1001 register_hooks ();
1002 while ((file_data = file_data_vec[j++]))
1003 {
1004 const char *data;
1005 size_t len;
1006 struct lto_input_block *ib
48e1416a 1007 = lto_create_simple_input_block (file_data,
1008 LTO_section_ipa_pure_const,
7bfefa9d 1009 &data, &len);
1010 if (ib)
1011 {
1012 unsigned int i;
7f385784 1013 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1014
1015 for (i = 0; i < count; i++)
1016 {
1017 unsigned int index;
1018 struct cgraph_node *node;
30baba90 1019 struct bitpack_d bp;
7bfefa9d 1020 funct_state fs;
70225339 1021 lto_symtab_encoder_t encoder;
7bfefa9d 1022
1023 fs = XCNEW (struct funct_state_d);
7f385784 1024 index = streamer_read_uhwi (ib);
70225339 1025 encoder = file_data->symtab_node_encoder;
1026 node = cgraph (lto_symtab_encoder_deref (encoder, index));
7bfefa9d 1027 set_function_state (node, fs);
1028
1029 /* Note that the flags must be read in the opposite
1030 order in which they were written (the bitflags were
1031 pushed into FLAGS). */
7f385784 1032 bp = streamer_read_bitpack (ib);
7bfefa9d 1033 fs->pure_const_state
30baba90 1034 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1035 fs->state_previously_known
30baba90 1036 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1037 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1038 fs->looping = bp_unpack_value (&bp, 1);
1039 fs->can_throw = bp_unpack_value (&bp, 1);
fc94a528 1040 if (dump_file)
1041 {
7d0d0ce1 1042 int flags = flags_from_decl_or_type (node->symbol.decl);
fc94a528 1043 fprintf (dump_file, "Read info for %s/%i ",
1044 cgraph_node_name (node),
1045 node->uid);
1046 if (flags & ECF_CONST)
1047 fprintf (dump_file, " const");
1048 if (flags & ECF_PURE)
1049 fprintf (dump_file, " pure");
1050 if (flags & ECF_NOTHROW)
1051 fprintf (dump_file, " nothrow");
1052 fprintf (dump_file, "\n pure const state: %s\n",
1053 pure_const_names[fs->pure_const_state]);
1054 fprintf (dump_file, " previously known state: %s\n",
 1055 pure_const_names[fs->state_previously_known]);
1056 if (fs->looping)
1057 fprintf (dump_file," function is locally looping\n");
1058 if (fs->looping_previously_known)
1059 fprintf (dump_file," function is previously known looping\n");
1060 if (fs->can_throw)
1061 fprintf (dump_file," function is locally throwing\n");
1062 }
7bfefa9d 1063 }
1064
48e1416a 1065 lto_destroy_simple_input_block (file_data,
1066 LTO_section_ipa_pure_const,
7bfefa9d 1067 ib, data, len);
1068 }
1069 }
1070}
1071
1072
17b28e52 1073static bool
1074ignore_edge (struct cgraph_edge *e)
1075{
1076 return (!e->can_throw_external);
1077}
1078
b2c2e188 1079/* Return true if NODE is a self-recursive function.
 1080 ??? Self-recursive and indirectly recursive functions should
 1081 be the same, so this function seems unnecessary. */
a1e88032 1082
1083static bool
1084self_recursive_p (struct cgraph_node *node)
1085{
1086 struct cgraph_edge *e;
1087 for (e = node->callees; e; e = e->next_callee)
b2c2e188 1088 if (cgraph_function_node (e->callee, NULL) == node)
a1e88032 1089 return true;
1090 return false;
1091}
1092
c0240443 1093/* Produce transitive closure over the callgraph and compute pure/const
1094 attributes. */
023a28e1 1095
c0240443 1096static void
1097propagate_pure_const (void)
cb886925 1098{
1099 struct cgraph_node *node;
1100 struct cgraph_node *w;
1101 struct cgraph_node **order =
1102 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1103 int order_pos;
1104 int i;
1105 struct ipa_dfs_info * w_info;
1106
7771d558 1107 order_pos = ipa_reduced_postorder (order, true, false, NULL);
f7d118a9 1108 if (dump_file)
1109 {
1110 dump_cgraph (dump_file);
7771d558 1111 ipa_print_order(dump_file, "reduced", order, order_pos);
f7d118a9 1112 }
1113
9d75589a 1114 /* Propagate the local information through the call graph to produce
f7d118a9 1115 the global information. All the nodes within a cycle will have
1116 the same info so we collapse cycles first. Then we can do the
1117 propagation in one pass from the leaves to the roots. */
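  /* As a hypothetical example: if f and g only call each other and both
     are locally const, they form one cycle that ends up IPA_CONST but
     with LOOPING set, since the mutual recursion may not terminate; if
     the cycle also calls an unknown external function, the whole cycle
     drops to IPA_NEITHER. */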
1118 for (i = 0; i < order_pos; i++ )
1119 {
1120 enum pure_const_state_e pure_const_state = IPA_CONST;
9c2a0c05 1121 bool looping = false;
54157bf1 1122 int count = 0;
f7d118a9 1123 node = order[i];
1124
8c1fce46 1125 if (node->alias)
1126 continue;
1127
fc94a528 1128 if (dump_file && (dump_flags & TDF_DETAILS))
1129 fprintf (dump_file, "Starting cycle\n");
1130
f7d118a9 1131 /* Find the worst state for any node in the cycle. */
1132 w = node;
023a28e1 1133 while (w && pure_const_state != IPA_NEITHER)
f7d118a9 1134 {
b5cebd44 1135 struct cgraph_edge *e;
023a28e1 1136 struct cgraph_edge *ie;
1137 int i;
1138 struct ipa_ref *ref;
1139
f7d118a9 1140 funct_state w_l = get_function_state (w);
fc94a528 1141 if (dump_file && (dump_flags & TDF_DETAILS))
1142 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1143 cgraph_node_name (w),
1144 w->uid,
1145 pure_const_names[w_l->pure_const_state],
1146 w_l->looping);
f7d118a9 1147
023a28e1 1148 /* First merge in function body properties. */
1149 worse_state (&pure_const_state, &looping,
1150 w_l->pure_const_state, w_l->looping);
1151 if (pure_const_state == IPA_NEITHER)
1152 break;
1153
 1154 /* For overwritable nodes we cannot assume anything. */
ef378c27 1155 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1156 {
023a28e1 1157 worse_state (&pure_const_state, &looping,
1158 w_l->state_previously_known,
1159 w_l->looping_previously_known);
fc94a528 1160 if (dump_file && (dump_flags & TDF_DETAILS))
1161 {
1162 fprintf (dump_file,
1163 " Overwritable. state %s looping %i\n",
1164 pure_const_names[w_l->state_previously_known],
1165 w_l->looping_previously_known);
1166 }
023a28e1 1167 break;
ef378c27 1168 }
9c2a0c05 1169
b5cebd44 1170 count++;
1171
023a28e1 1172 /* We consider recursive cycles as possibly infinite.
1173 This might be relaxed since infinite recursion leads to stack
1174 overflow. */
b5cebd44 1175 if (count > 1)
1176 looping = true;
48e1416a 1177
023a28e1 1178 /* Now walk the edges and merge in callee properties. */
48e1416a 1179 for (e = w->callees; e; e = e->next_callee)
f7d118a9 1180 {
b2c2e188 1181 enum availability avail;
1182 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
fc94a528 1183 enum pure_const_state_e edge_state = IPA_CONST;
1184 bool edge_looping = false;
54157bf1 1185
fc94a528 1186 if (dump_file && (dump_flags & TDF_DETAILS))
1187 {
1188 fprintf (dump_file,
1189 " Call to %s/%i",
1190 cgraph_node_name (e->callee),
1191 e->callee->uid);
1192 }
b2c2e188 1193 if (avail > AVAIL_OVERWRITABLE)
f7d118a9 1194 {
b5cebd44 1195 funct_state y_l = get_function_state (y);
fc94a528 1196 if (dump_file && (dump_flags & TDF_DETAILS))
1197 {
1198 fprintf (dump_file,
1199 " state:%s looping:%i\n",
1200 pure_const_names[y_l->pure_const_state],
1201 y_l->looping);
1202 }
4c170941 1203 if (y_l->pure_const_state > IPA_PURE
1ee39ad1 1204 && cgraph_edge_cannot_lead_to_return (e))
fc94a528 1205 {
1206 if (dump_file && (dump_flags & TDF_DETAILS))
023a28e1 1207 fprintf (dump_file,
1208 " Ignoring side effects"
1209 " -> pure, looping\n");
fc94a528 1210 edge_state = IPA_PURE;
1211 edge_looping = true;
1212 }
1213 else
1214 {
1215 edge_state = y_l->pure_const_state;
1216 edge_looping = y_l->looping;
1217 }
f7d118a9 1218 }
a53e7471 1219 else if (special_builtin_state (&edge_state, &edge_looping,
7d0d0ce1 1220 y->symbol.decl))
7dd42908 1221 ;
ef378c27 1222 else
023a28e1 1223 state_from_flags (&edge_state, &edge_looping,
7d0d0ce1 1224 flags_from_decl_or_type (y->symbol.decl),
7dd42908 1225 cgraph_edge_cannot_lead_to_return (e));
023a28e1 1226
1227 /* Merge the results with what we already know. */
1228 better_state (&edge_state, &edge_looping,
1229 w_l->state_previously_known,
1230 w_l->looping_previously_known);
1231 worse_state (&pure_const_state, &looping,
1232 edge_state, edge_looping);
1233 if (pure_const_state == IPA_NEITHER)
1234 break;
1235 }
1236 if (pure_const_state == IPA_NEITHER)
1237 break;
ef378c27 1238
023a28e1 1239 /* Now process the indirect calls. */
451e322a 1240 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
023a28e1 1241 {
1242 enum pure_const_state_e edge_state = IPA_CONST;
1243 bool edge_looping = false;
4c170941 1244
023a28e1 1245 if (dump_file && (dump_flags & TDF_DETAILS))
1246 fprintf (dump_file, " Indirect call");
1247 state_from_flags (&edge_state, &edge_looping,
1248 ie->indirect_info->ecf_flags,
1249 cgraph_edge_cannot_lead_to_return (ie));
1250 /* Merge the results with what we already know. */
1251 better_state (&edge_state, &edge_looping,
1252 w_l->state_previously_known,
1253 w_l->looping_previously_known);
1254 worse_state (&pure_const_state, &looping,
1255 edge_state, edge_looping);
fc94a528 1256 if (pure_const_state == IPA_NEITHER)
1257 break;
f7d118a9 1258 }
023a28e1 1259 if (pure_const_state == IPA_NEITHER)
1260 break;
1261
1262 /* And finally all loads and stores. */
7d0d0ce1 1263 for (i = 0; ipa_ref_list_reference_iterate (&w->symbol.ref_list, i, ref); i++)
023a28e1 1264 {
1265 enum pure_const_state_e ref_state = IPA_CONST;
1266 bool ref_looping = false;
1267 switch (ref->use)
1268 {
1269 case IPA_REF_LOAD:
1270 /* readonly reads are safe. */
7d0d0ce1 1271 if (TREE_READONLY (ipa_ref_varpool_node (ref)->symbol.decl))
023a28e1 1272 break;
1273 if (dump_file && (dump_flags & TDF_DETAILS))
1274 fprintf (dump_file, " nonreadonly global var read\n");
1275 ref_state = IPA_PURE;
1276 break;
1277 case IPA_REF_STORE:
1278 if (ipa_ref_cannot_lead_to_return (ref))
1279 break;
1280 ref_state = IPA_NEITHER;
1281 if (dump_file && (dump_flags & TDF_DETAILS))
1282 fprintf (dump_file, " global var write\n");
1283 break;
1284 case IPA_REF_ADDR:
1285 break;
1286 }
1287 better_state (&ref_state, &ref_looping,
1288 w_l->state_previously_known,
1289 w_l->looping_previously_known);
1290 worse_state (&pure_const_state, &looping,
1291 ref_state, ref_looping);
1292 if (pure_const_state == IPA_NEITHER)
1293 break;
1294 }
7d0d0ce1 1295 w_info = (struct ipa_dfs_info *) w->symbol.aux;
f7d118a9 1296 w = w_info->next_cycle;
1297 }
fc94a528 1298 if (dump_file && (dump_flags & TDF_DETAILS))
1299 fprintf (dump_file, "Result %s looping %i\n",
1300 pure_const_names [pure_const_state],
1301 looping);
f7d118a9 1302
1303 /* Copy back the region's pure_const_state which is shared by
1304 all nodes in the region. */
1305 w = node;
1306 while (w)
1307 {
1308 funct_state w_l = get_function_state (w);
b5cebd44 1309 enum pure_const_state_e this_state = pure_const_state;
1310 bool this_looping = looping;
f7d118a9 1311
df9b545b 1312 if (w_l->state_previously_known != IPA_NEITHER
1313 && this_state > w_l->state_previously_known)
4c170941 1314 {
1315 this_state = w_l->state_previously_known;
1316 this_looping |= w_l->looping_previously_known;
1317 }
a1e88032 1318 if (!this_looping && self_recursive_p (w))
1319 this_looping = true;
df9b545b 1320 if (!w_l->looping_previously_known)
1321 this_looping = false;
cb886925 1322
b5cebd44 1323 /* All nodes within a cycle share the same info. */
1324 w_l->pure_const_state = this_state;
1325 w_l->looping = this_looping;
1326
1327 switch (this_state)
1328 {
1329 case IPA_CONST:
7d0d0ce1 1330 if (!TREE_READONLY (w->symbol.decl))
2c06958d 1331 {
7d0d0ce1 1332 warn_function_const (w->symbol.decl, !this_looping);
2c06958d 1333 if (dump_file)
1334 fprintf (dump_file, "Function found to be %sconst: %s\n",
1335 this_looping ? "looping " : "",
1336 cgraph_node_name (w));
1337 }
7f74ac6b 1338 cgraph_set_const_flag (w, true, this_looping);
b5cebd44 1339 break;
48e1416a 1340
b5cebd44 1341 case IPA_PURE:
7d0d0ce1 1342 if (!DECL_PURE_P (w->symbol.decl))
2c06958d 1343 {
7d0d0ce1 1344 warn_function_pure (w->symbol.decl, !this_looping);
2c06958d 1345 if (dump_file)
1346 fprintf (dump_file, "Function found to be %spure: %s\n",
1347 this_looping ? "looping " : "",
1348 cgraph_node_name (w));
1349 }
7f74ac6b 1350 cgraph_set_pure_flag (w, true, this_looping);
b5cebd44 1351 break;
48e1416a 1352
b5cebd44 1353 default:
1354 break;
1355 }
7d0d0ce1 1356 w_info = (struct ipa_dfs_info *) w->symbol.aux;
17b28e52 1357 w = w_info->next_cycle;
1358 }
1359 }
1360
7771d558 1361 ipa_free_postorder_info ();
c0240443 1362 free (order);
1363}
1364
1365/* Produce transitive closure over the callgraph and compute nothrow
1366 attributes. */
1367
1368static void
1369propagate_nothrow (void)
1370{
1371 struct cgraph_node *node;
1372 struct cgraph_node *w;
1373 struct cgraph_node **order =
1374 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1375 int order_pos;
1376 int i;
1377 struct ipa_dfs_info * w_info;
1378
7771d558 1379 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
17b28e52 1380 if (dump_file)
1381 {
1382 dump_cgraph (dump_file);
7771d558 1383 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1384 }
c0240443 1385
9d75589a 1386 /* Propagate the local information through the call graph to produce
17b28e52 1387 the global information. All the nodes within a cycle will have
1388 the same info so we collapse cycles first. Then we can do the
1389 propagation in one pass from the leaves to the roots. */
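  /* For instance, in a hypothetical cycle where one member calls a
     function that can throw externally, every member of the cycle is
     treated as throwing; only cycles whose members and callees are all
     nothrow get TREE_NOTHROW set. */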
1390 for (i = 0; i < order_pos; i++ )
1391 {
1392 bool can_throw = false;
1393 node = order[i];
1394
8c1fce46 1395 if (node->alias)
1396 continue;
1397
17b28e52 1398 /* Find the worst state for any node in the cycle. */
1399 w = node;
1400 while (w)
1401 {
023a28e1 1402 struct cgraph_edge *e, *ie;
17b28e52 1403 funct_state w_l = get_function_state (w);
1404
ef378c27 1405 if (w_l->can_throw
1406 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
17b28e52 1407 can_throw = true;
1408
1409 if (can_throw)
1410 break;
48e1416a 1411
1412 for (e = w->callees; e; e = e->next_callee)
17b28e52 1413 {
b2c2e188 1414 enum availability avail;
1415 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
17b28e52 1416
b2c2e188 1417 if (avail > AVAIL_OVERWRITABLE)
17b28e52 1418 {
1419 funct_state y_l = get_function_state (y);
1420
48e1416a 1421 if (can_throw)
17b28e52 1422 break;
7d0d0ce1 1423 if (y_l->can_throw && !TREE_NOTHROW (w->symbol.decl)
17b28e52 1424 && e->can_throw_external)
1425 can_throw = true;
1426 }
7d0d0ce1 1427 else if (e->can_throw_external && !TREE_NOTHROW (y->symbol.decl))
ef378c27 1428 can_throw = true;
17b28e52 1429 }
023a28e1 1430 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
1431 if (ie->can_throw_external)
1432 can_throw = true;
7d0d0ce1 1433 w_info = (struct ipa_dfs_info *) w->symbol.aux;
17b28e52 1434 w = w_info->next_cycle;
1435 }
1436
 1437 /* Copy back the region's nothrow state, which is shared by
1438 all nodes in the region. */
1439 w = node;
1440 while (w)
1441 {
ecfab407 1442 funct_state w_l = get_function_state (w);
7d0d0ce1 1443 if (!can_throw && !TREE_NOTHROW (w->symbol.decl))
b5cebd44 1444 {
16fcf0f4 1445 cgraph_set_nothrow_flag (w, true);
b5cebd44 1446 if (dump_file)
48e1416a 1447 fprintf (dump_file, "Function found to be nothrow: %s\n",
b5cebd44 1448 cgraph_node_name (w));
f7d118a9 1449 }
7d0d0ce1 1450 else if (can_throw && !TREE_NOTHROW (w->symbol.decl))
ecfab407 1451 w_l->can_throw = true;
7d0d0ce1 1452 w_info = (struct ipa_dfs_info *) w->symbol.aux;
f7d118a9 1453 w = w_info->next_cycle;
1454 }
1455 }
1456
7771d558 1457 ipa_free_postorder_info ();
f7d118a9 1458 free (order);
c0240443 1459}
1460
1461
1462/* Produce the global information by preforming a transitive closure
1463 on the local information that was produced by generate_summary. */
1464
1465static unsigned int
1466propagate (void)
1467{
1468 struct cgraph_node *node;
1469
1470 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1471 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1472 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1473
 1474 /* Discovering nothrow first lets more calls be recognized as not
 1475 leading to a return, which improves the later pure/const analysis. */
1476 propagate_nothrow ();
1477 propagate_pure_const ();
1478
1479 /* Cleanup. */
7c455d87 1480 FOR_EACH_DEFINED_FUNCTION (node)
c0240443 1481 if (has_function_state (node))
1482 free (get_function_state (node));
f1f41a6c 1483 funct_state_vec.release ();
cb886925 1484 finish_state ();
2a1990e9 1485 return 0;
f7d118a9 1486}
1487
1488static bool
1489gate_pure_const (void)
1490{
86844d6c 1491 return (flag_ipa_pure_const
f7d118a9 1492 /* Don't bother doing anything if the program has errors. */
852f689e 1493 && !seen_error ());
f7d118a9 1494}
1495
26dbec0a 1496struct ipa_opt_pass_d pass_ipa_pure_const =
f7d118a9 1497{
20099e35 1498 {
cb886925 1499 IPA_PASS,
d3d7bd46 1500 "pure-const", /* name */
c7875731 1501 OPTGROUP_NONE, /* optinfo_flags */
f7d118a9 1502 gate_pure_const, /* gate */
cb886925 1503 propagate, /* execute */
f7d118a9 1504 NULL, /* sub */
1505 NULL, /* next */
1506 0, /* static_pass_number */
1507 TV_IPA_PURE_CONST, /* tv_id */
1508 0, /* properties_required */
1509 0, /* properties_provided */
1510 0, /* properties_destroyed */
1511 0, /* todo_flags_start */
20099e35 1512 0 /* todo_flags_finish */
cb886925 1513 },
1514 generate_summary, /* generate_summary */
7bfefa9d 1515 pure_const_write_summary, /* write_summary */
1516 pure_const_read_summary, /* read_summary */
ddc90d88 1517 NULL, /* write_optimization_summary */
1518 NULL, /* read_optimization_summary */
90464c8b 1519 NULL, /* stmt_fixup */
cb886925 1520 0, /* TODOs */
1521 NULL, /* function_transform */
1522 NULL /* variable_transform */
f7d118a9 1523};
b5cebd44 1524
2c06958d 1525/* Return true if the function should be skipped for local pure const analysis. */
b5cebd44 1526
2c06958d 1527static bool
1528skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 1529{
b5cebd44 1530 /* Because we do not schedule pass_fixup_cfg over the whole program after early optimizations,
1531 we must not promote functions that are called by already processed functions. */
1532
1533 if (function_called_by_processed_nodes_p ())
1534 {
1535 if (dump_file)
1536 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 1537 return true;
b5cebd44 1538 }
16fcf0f4 1539 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
b5cebd44 1540 {
1541 if (dump_file)
0a10fd82 1542 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
2c06958d 1543 return true;
b5cebd44 1544 }
2c06958d 1545 return false;
1546}
1547
1548/* Simple local pass for pure const discovery reusing the analysis from
1549 ipa_pure_const. This pass is effective when executed together with
 1550 other optimization passes in the early optimization pass queue. */
b5cebd44 1551
2c06958d 1552static unsigned int
1553local_pure_const (void)
1554{
1555 bool changed = false;
1556 funct_state l;
1557 bool skip;
1558 struct cgraph_node *node;
1559
fd6a3c41 1560 node = cgraph_get_node (current_function_decl);
2c06958d 1561 skip = skip_function_for_local_pure_const (node);
1562 if (!warn_suggest_attribute_const
1563 && !warn_suggest_attribute_pure
1564 && skip)
1565 return 0;
e0933141 1566
ab3db708 1567 l = analyze_function (node, false);
1568
1569 /* Do NORETURN discovery. */
e0933141 1570 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1571 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1572 {
43d60d64 1573 warn_function_noreturn (cfun->decl);
e0933141 1574 if (dump_file)
1575 fprintf (dump_file, "Function found to be noreturn: %s\n",
9631926a 1576 current_function_name ());
e0933141 1577
1578 /* Update declaration and reduce profile to executed once. */
1579 TREE_THIS_VOLATILE (current_function_decl) = 1;
1580 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1581 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1582
1583 changed = true;
1584 }
ef378c27 1585
b5cebd44 1586 switch (l->pure_const_state)
1587 {
1588 case IPA_CONST:
1589 if (!TREE_READONLY (current_function_decl))
1590 {
2c06958d 1591 warn_function_const (current_function_decl, !l->looping);
1592 if (!skip)
1593 {
7f74ac6b 1594 cgraph_set_const_flag (node, true, l->looping);
2c06958d 1595 changed = true;
1596 }
b5cebd44 1597 if (dump_file)
1598 fprintf (dump_file, "Function found to be %sconst: %s\n",
1599 l->looping ? "looping " : "",
9631926a 1600 current_function_name ());
b5cebd44 1601 }
1602 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1603 && !l->looping)
1604 {
2c06958d 1605 if (!skip)
1606 {
7f74ac6b 1607 cgraph_set_const_flag (node, true, false);
2c06958d 1608 changed = true;
1609 }
b5cebd44 1610 if (dump_file)
1611 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1612 current_function_name ());
b5cebd44 1613 }
1614 break;
1615
1616 case IPA_PURE:
2c06958d 1617 if (!DECL_PURE_P (current_function_decl))
b5cebd44 1618 {
2c06958d 1619 if (!skip)
1620 {
7f74ac6b 1621 cgraph_set_pure_flag (node, true, l->looping);
2c06958d 1622 changed = true;
1623 }
1624 warn_function_pure (current_function_decl, !l->looping);
b5cebd44 1625 if (dump_file)
1626 fprintf (dump_file, "Function found to be %spure: %s\n",
1627 l->looping ? "looping " : "",
9631926a 1628 current_function_name ());
b5cebd44 1629 }
1630 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1631 && !l->looping)
1632 {
2c06958d 1633 if (!skip)
1634 {
7f74ac6b 1635 cgraph_set_pure_flag (node, true, false);
2c06958d 1636 changed = true;
1637 }
b5cebd44 1638 if (dump_file)
1639 fprintf (dump_file, "Function found to be non-looping: %s\n",
9631926a 1640 current_function_name ());
b5cebd44 1641 }
1642 break;
1643
1644 default:
1645 break;
1646 }
1647 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1648 {
16fcf0f4 1649 cgraph_set_nothrow_flag (node, true);
b5cebd44 1650 changed = true;
1651 if (dump_file)
1652 fprintf (dump_file, "Function found to be nothrow: %s\n",
9631926a 1653 current_function_name ());
b5cebd44 1654 }
dd045aee 1655 free (l);
b5cebd44 1656 if (changed)
1657 return execute_fixup_cfg ();
1658 else
1659 return 0;
1660}
1661
1662struct gimple_opt_pass pass_local_pure_const =
1663{
1664 {
1665 GIMPLE_PASS,
1666 "local-pure-const", /* name */
c7875731 1667 OPTGROUP_NONE, /* optinfo_flags */
b5cebd44 1668 gate_pure_const, /* gate */
1669 local_pure_const, /* execute */
1670 NULL, /* sub */
1671 NULL, /* next */
1672 0, /* static_pass_number */
1673 TV_IPA_PURE_CONST, /* tv_id */
1674 0, /* properties_required */
1675 0, /* properties_provided */
1676 0, /* properties_destroyed */
1677 0, /* todo_flags_start */
1678 0 /* todo_flags_finish */
1679 }
1680};