]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-pure-const.c
Daily bump.
[thirdparty/gcc.git] / gcc / ipa-pure-const.c
CommitLineData
f7d118a9 1/* Callgraph based analysis of static variables.
8e8f6434 2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
9ef16211 37#include "backend.h"
7c29e30e 38#include "target.h"
f7d118a9 39#include "tree.h"
9ef16211 40#include "gimple.h"
7c29e30e 41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
9ed99284 45#include "calls.h"
94ea8568 46#include "cfganal.h"
bc61cadb 47#include "tree-eh.h"
dcf1a1ec 48#include "gimple-iterator.h"
49#include "gimple-walk.h"
073c1fd5 50#include "tree-cfg.h"
05d9c18a 51#include "tree-ssa-loop-niter.h"
f7d118a9 52#include "langhooks.h"
f7d118a9 53#include "ipa-utils.h"
ce084dfc 54#include "gimple-pretty-print.h"
c9263b6a 55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
2c06958d 57#include "intl.h"
58#include "opts.h"
bd5ef087 59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
62#include "ipa-prop.h"
63#include "ipa-fnsummary.h"
f7d118a9 64
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names for the lattice above, indexed by pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Lattice for discovery of the malloc attribute: TOP is the optimistic
   "unknown" starting point, MALLOC means the function looks like a
   malloc-style allocator, BOTTOM means it is known not to be one.  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Printable names for the malloc lattice, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Whether the user-declared state already allowed looping.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if an exception may escape from this function.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  /* Position in the malloc-attribute discovery lattice above.  */
  enum malloc_state_e malloc_state;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

/* Forward declaration; the definition lives with the pass machinery.  */
static bool gate_pure_const (void);
129
namespace {

/* Pass metadata for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass that discovers const/pure properties of functions and
   propagates them across the callgraph.  */
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  /* Install the cgraph hooks held below.  */
  void register_hooks (void);

private:
  /* True once register_hooks has been performed.  */
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
167
2c06958d 168/* Try to guess if function body will always be visible to compiler
169 when compiling the call and whether compiler will be able
170 to propagate the information by itself. */
171
172static bool
173function_always_visible_to_compiler_p (tree decl)
174{
60722a03 175 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
176 || DECL_COMDAT (decl));
2c06958d 177}
178
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do if the user did not enable this -Wsuggest-* option.  */
  if (!option_enabled (option, &global_options))
    return warned_about;
  /* Do not suggest the attribute for volatile (noreturn) functions, nor
     when the function is finite and its body is always visible: the
     compiler can then derive the property itself.  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily allocate the per-option set and warn only once per decl.  */
  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
209
210/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
211 is true if the function is known to be finite. */
212
213static void
214warn_function_pure (tree decl, bool known_finite)
215{
2acafe26 216 /* Declaring a void function pure makes no sense and is diagnosed
217 by -Wattributes because calling it would have no effect. */
218 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
219 return;
2c06958d 220
2acafe26 221 static hash_set<tree> *warned_about;
222 warned_about
2c06958d 223 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
224 known_finite, warned_about, "pure");
225}
226
227/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
228 is true if the function is known to be finite. */
229
230static void
231warn_function_const (tree decl, bool known_finite)
232{
2acafe26 233 /* Declaring a void function const makes no sense is diagnosed
234 by -Wattributes because calling it would have no effect. */
235 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
236 return;
237
431205b7 238 static hash_set<tree> *warned_about;
2acafe26 239 warned_about
2c06958d 240 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
241 known_finite, warned_about, "const");
242}
43d60d64 243
bd5ef087 244/* Emit suggestion about __attribute__((malloc)) for DECL. */
245
246static void
247warn_function_malloc (tree decl)
248{
249 static hash_set<tree> *warned_about;
250 warned_about
251 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
252 false, warned_about, "malloc");
253}
254
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  /* Warn on the original decl even if the analysis looked at the
     instrumented version (see below).  */
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  /* For instrumentation clones, consult the instrumented version when
     querying the hooks below.  */
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  /* Skip functions the frontend allows to lack noreturn (language hook)
     or for which the target suppresses return warnings.  */
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}
9631926a 273
/* Emit suggestion about __attribute__((cold)) for DECL.  Exported; may
   be called from outside this file (non-static on purpose).  */

void
warn_function_cold (tree decl)
{
  /* Warn on the original decl, not the instrumented clone.  */
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}
288
1ee39ad1 289/* Return true if we have a function state for NODE. */
290
291static inline bool
292has_function_state (struct cgraph_node *node)
293{
f1f41a6c 294 if (!funct_state_vec.exists ()
295 || funct_state_vec.length () <= (unsigned int)node->uid)
1ee39ad1 296 return false;
f1f41a6c 297 return funct_state_vec[node->uid] != NULL;
1ee39ad1 298}
299
48e1416a 300/* Return the function state from NODE. */
f7d118a9 301
302static inline funct_state
303get_function_state (struct cgraph_node *node)
304{
f1f41a6c 305 if (!funct_state_vec.exists ()
306 || funct_state_vec.length () <= (unsigned int)node->uid
307 || !funct_state_vec[node->uid])
1ee39ad1 308 /* We might want to put correct previously_known state into varying. */
db86e424 309 return &varying_state;
f1f41a6c 310 return funct_state_vec[node->uid];
cb886925 311}
312
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  /* Grow the vector (zero-filled) so NODE's uid is a valid index.  */
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
     funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it's going to be overwritten.  Never free the shared
     static varying_state.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}
330
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   Updates LOCAL's lattice in place.  IPA is true during whole-program
   analysis, where loads/stores are handled later via ipa_ref.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read: demote const to pure, leave pure/neither.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read: demote const to pure, leave pure/neither.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
406
f7d118a9 407
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   Unlike check_decl, this handles memory reference operands; T is first
   reduced to its base address.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* Dereference of a pointer that provably does not alias global
	 memory cannot affect the const/pure state.  */
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      /* A read through a pointer demotes const to pure.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
446
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN is true when the call never returns normally,
   in which case its side effects can be ignored for the caller.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      /* A call that never returns to the caller cannot change the
	 caller's observable state; treat it as pure but looping.  */
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
487
488/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
489 into STATE and LOOPING better of the two variants.
490 Be sure to merge looping correctly. IPA_NEITHER functions
491 have looping 0 even if they don't have to return. */
492
493static inline void
494better_state (enum pure_const_state_e *state, bool *looping,
495 enum pure_const_state_e state2, bool looping2)
496{
497 if (state2 < *state)
498 {
499 if (*state == IPA_NEITHER)
500 *looping = looping2;
501 else
502 *looping = MIN (*looping, looping2);
26fc128e 503 *state = state2;
023a28e1 504 }
505 else if (state2 != IPA_NEITHER)
506 *looping = MIN (*looping, looping2);
507}
508
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   N is the actual node called.  FROM/TO are the caller and callee
   symtab nodes, used for the interposition check below; either may
   be NULL when the check is not applicable.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed
     it may end up being just pure.  We always must assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  /* Lattice order makes larger values strictly worse, so MAX merges.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
547
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  On a match, set *STATE and *LOOPING and
   return true; otherwise return false and leave them untouched.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins manipulate only the current frame / EH state,
	 which is invisible to callers, so the caller may still be
	 const.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      /* Prefetch is const but must not be removed by DCE, hence
	 looping.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
584
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  LOCAL is updated in place; IPA selects
   whole-program mode, where direct calls are left for propagation.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  /* Arguments of a throwing call can themselves throw (e.g. trapping
     memory references under -fnon-call-exceptions).  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* A builtin not known to be non-freeing may release memory.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      /* A call that cannot return normally (noreturn+nothrow, or
	 noreturn without exceptions enabled) has unobservable side
	 effects from the caller's perspective.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
723
023a28e1 724/* Wrapper around check_decl for loads in local more. */
5ed0b345 725
726static bool
42acab1c 727check_load (gimple *, tree op, tree, void *data)
5ed0b345 728{
729 if (DECL_P (op))
023a28e1 730 check_decl ((funct_state)data, op, false, false);
5ed0b345 731 else
732 check_op ((funct_state)data, op, false);
733 return false;
734}
735
023a28e1 736/* Wrapper around check_decl for stores in local more. */
5ed0b345 737
738static bool
42acab1c 739check_store (gimple *, tree op, tree, void *data)
5ed0b345 740{
741 if (DECL_P (op))
023a28e1 742 check_decl ((funct_state)data, op, true, false);
743 else
744 check_op ((funct_state)data, op, true);
745 return false;
746}
747
748/* Wrapper around check_decl for loads in ipa mode. */
749
750static bool
42acab1c 751check_ipa_load (gimple *, tree op, tree, void *data)
023a28e1 752{
753 if (DECL_P (op))
754 check_decl ((funct_state)data, op, false, true);
755 else
756 check_op ((funct_state)data, op, false);
757 return false;
758}
759
760/* Wrapper around check_decl for stores in ipa mode. */
761
762static bool
42acab1c 763check_ipa_store (gimple *, tree op, tree, void *data)
023a28e1 764{
765 if (DECL_P (op))
766 check_decl ((funct_state)data, op, true, true);
5ed0b345 767 else
768 check_op ((funct_state)data, op, true);
769 return false;
770}
771
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  Accumulates the findings into LOCAL; IPA selects
   whole-program mode (loads/stores deferred to propagation).  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug stmts never affect generated code.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Calls are handled separately in check_call; here account for
     throwing non-call statements.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
867
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.
   Returns true when every immediate use of RETVAL is either STMT
   itself, a debug stmt, or a comparison against zero (as a gcond
   condition or a tcc_comparison gassign).  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	/* Allowed: if (retval OP 0).  */
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	/* Allowed: tmp = retval OP 0 where OP is a comparison.  */
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
899
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

/* Dump the (string) REASON under -fdump...-details and fail.  */
#define DUMP_AND_RETURN(reason) \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS)) \
    fprintf (dump_file, "%s", (reason)); \
  return false; \
}

  /* No path reaches the exit block: nothing to prove malloc-ness on.  */
  if (EDGE_COUNT (exit_block->preds) == 0)
    return false;

  /* Every return statement must return the (possibly phi-merged) result
     of a call, used only in NULL-checks.  */
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  /* In local mode the callee must already be known malloc;
	     in IPA mode the property may still be propagated.  */
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  /* Record on the edge that the caller does not capture the
	     callee's return value.  */
	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      gcc_assert (es);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	/* Each phi argument must itself be a call result (and only
	   NULL-checked), so every returned pointer is fresh.  */
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
	    if (!(arg == null_pointer_node || check_retval_uses (arg, phi)))
	      DUMP_AND_RETURN("phi arg has uses outside phi"
			      " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      return false;
	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			      " non-ipa mode.")

	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get (cs);
		gcc_assert (es);
		es->is_return_callee_uncaptured = true;
	      }
	  }

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}
1010
cb886925 1011
f7d118a9 1012/* This is the main routine for finding the reference patterns for
 1013 global variables within a function FN. */
   /* Returns a heap-allocated funct_state (XCNEW below); ownership passes
      to the caller, which normally stores it via set_function_state.
      IPA selects whether the analysis feeds the interprocedural lattices
      (e.g. malloc_candidate_p is also run in ipa mode).  */
 1014
b5cebd44 1015static funct_state
 1016analyze_function (struct cgraph_node *fn, bool ipa)
f7d118a9 1017{
02774f2d 1018 tree decl = fn->decl;
b5cebd44 1019 funct_state l;
 1020 basic_block this_block;
f7d118a9 1021
b5cebd44 1022 l = XCNEW (struct funct_state_d);
 1023 l->pure_const_state = IPA_CONST;
df9b545b 1024 l->state_previously_known = IPA_NEITHER;
 1025 l->looping_previously_known = true;
b5cebd44 1026 l->looping = false;
 1027 l->can_throw = false;
04c849b3 1028 l->can_free = false;
91bf9d9a 1029 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
02774f2d 1030 flags_from_decl_or_type (fn->decl),
415d1b9a 1031 fn->cannot_return_p ());
91bf9d9a 1032
02774f2d 1033 if (fn->thunk.thunk_p || fn->alias)
91bf9d9a 1034 {
 1035 /* Thunk gets propagated through, so nothing interesting happens. */
 1036 gcc_assert (ipa);
cfd85d03 1037 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
 1038 l->pure_const_state = IPA_NEITHER;
91bf9d9a 1039 return l;
 1040 }
f7d118a9 1041
 1042 if (dump_file)
 1043 {
48e1416a 1044 fprintf (dump_file, "\n\n local analysis of %s\n ",
f1c8b4d7 1045 fn->name ());
f7d118a9 1046 }
48e1416a 1047
b5cebd44 1048 push_cfun (DECL_STRUCT_FUNCTION (decl));
48e1416a 1049
fc00614f 1050 FOR_EACH_BB_FN (this_block, cfun)
f7d118a9 1051 {
b5cebd44 1052 gimple_stmt_iterator gsi;
 1053 struct walk_stmt_info wi;
f7d118a9 1054
9af5ce0c 1055 memset (&wi, 0, sizeof (wi));
b5cebd44 1056 for (gsi = gsi_start_bb (this_block);
 1057 !gsi_end_p (gsi);
 1058 gsi_next (&gsi))
f7d118a9 1059 {
b5cebd44 1060 check_stmt (&gsi, l, ipa);
   /* Once every tracked property has reached its worst value there is
      nothing left to learn from the remaining statements.  */
04c849b3 1061 if (l->pure_const_state == IPA_NEITHER
 1062 && l->looping
 1063 && l->can_throw
 1064 && l->can_free)
b5cebd44 1065 goto end;
f7d118a9 1066 }
 1067 }
dcccac3e 1068
 1069end:
b5cebd44 1070 if (l->pure_const_state != IPA_NEITHER)
 1071 {
 1072 /* Const functions cannot have back edges (an
 1073 indication of possible infinite loop side
 1074 effect. */
 1075 if (mark_dfs_back_edges ())
c9263b6a 1076 {
b1887855 1077 /* Preheaders are needed for SCEV to work.
0a10fd82 1078 Simple latches and recorded exits improve chances that loop will
8ff30f9a 1079 proved to be finite in testcases such as in loop-15.c
 1080 and loop-24.c */
 1081 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
 1082 | LOOPS_HAVE_SIMPLE_LATCHES
b1887855 1083 | LOOPS_HAVE_RECORDED_EXITS);
c9263b6a 1084 if (dump_file && (dump_flags & TDF_DETAILS))
 1085 flow_loops_dump (dump_file, NULL, 0);
 1086 if (mark_irreducible_loops ())
 1087 {
 1088 if (dump_file)
 1089 fprintf (dump_file, " has irreducible loops\n");
 1090 l->looping = true;
 1091 }
48e1416a 1092 else
c9263b6a 1093 {
c9263b6a 1094 struct loop *loop;
 1095 scev_initialize ();
f21d4d00 1096 FOR_EACH_LOOP (loop, 0)
c9263b6a 1097 if (!finite_loop_p (loop))
 1098 {
 1099 if (dump_file)
8ff30f9a 1100 fprintf (dump_file, " can not prove finiteness of "
 1101 "loop %i\n", loop->num);
c9263b6a 1102 l->looping =true;
f21d4d00 1103 break;
c9263b6a 1104 }
 1105 scev_finalize ();
 1106 }
 1107 loop_optimizer_finalize ();
 1108 }
b5cebd44 1109 }
 1110
023a28e1 1111 if (dump_file && (dump_flags & TDF_DETAILS))
 1112 fprintf (dump_file, " checking previously known:");
023a28e1 1113
 1114 better_state (&l->pure_const_state, &l->looping,
 1115 l->state_previously_known,
 1116 l->looping_previously_known);
b5cebd44 1117 if (TREE_NOTHROW (decl))
 1118 l->can_throw = false;
 1119
   /* Initial malloc lattice value: a decl already carrying the malloc
      attribute starts at STATE_MALLOC; in ipa mode a candidate starts at
      TOP so the interprocedural pass (propagate_malloc) can refine it.  */
bd5ef087 1120 l->malloc_state = STATE_MALLOC_BOTTOM;
 1121 if (DECL_IS_MALLOC (decl))
 1122 l->malloc_state = STATE_MALLOC;
 1123 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
 1124 l->malloc_state = STATE_MALLOC_TOP;
 1125 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
 1126 l->malloc_state = STATE_MALLOC;
 1127
b5cebd44 1128 pop_cfun ();
dcccac3e 1129 if (dump_file)
 1130 {
b5cebd44 1131 if (l->looping)
 1132 fprintf (dump_file, "Function is locally looping.\n");
 1133 if (l->can_throw)
 1134 fprintf (dump_file, "Function is locally throwing.\n");
 1135 if (l->pure_const_state == IPA_CONST)
 1136 fprintf (dump_file, "Function is locally const.\n");
 1137 if (l->pure_const_state == IPA_PURE)
 1138 fprintf (dump_file, "Function is locally pure.\n");
04c849b3 1139 if (l->can_free)
 1140 fprintf (dump_file, "Function can locally free.\n");
bd5ef087 1141 if (l->malloc_state == STATE_MALLOC)
 1142 fprintf (dump_file, "Function is locally malloc.\n");
dcccac3e 1143 }
b5cebd44 1144 return l;
f7d118a9 1145}
1146
50828ed8 1147/* Called when new function is inserted to callgraph late. */
 1148static void
 1149add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
 1150{
50828ed8 1151 /* There are some shared nodes, in particular the initializers on
 1152 static declarations. We do not need to scan them more than once
 1153 since all we would be interested in are the addressof
 1154 operations. */
   /* Run the same ipa-mode local analysis used by
      pure_const_generate_summary so late-inserted bodies get a summary.  */
8b4ee73c 1155 if (opt_for_fn (node->decl, flag_ipa_pure_const))
2c06958d 1156 set_function_state (node, analyze_function (node, true));
50828ed8 1157}
1158
86844d6c 1159/* Called when new clone is inserted to callgraph late. */
 1160
 1161static void
 1162duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
 1163 void *data ATTRIBUTE_UNUSED)
 1164{
1ee39ad1 1165 if (has_function_state (src))
86844d6c 1166 {
   /* The clone receives its own shallow copy of the source's state;
      assumes funct_state_d holds only scalar fields -- TODO confirm
      against the struct definition earlier in this file.  */
 1167 funct_state l = XNEW (struct funct_state_d);
1ee39ad1 1168 gcc_assert (!has_function_state (dst));
86844d6c 1169 memcpy (l, get_function_state (src), sizeof (*l));
 1170 set_function_state (dst, l);
 1171 }
 1172}
1173
 1174/* Called when a cgraph node is removed (registered below via
   add_cgraph_removal_hook); drops the node's funct_state.  */
 1175
 1176static void
 1177remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
 1178{
1ee39ad1 1179 if (has_function_state (node))
af91a851 1180 set_function_state (node, NULL)
86844d6c 1181}
1182
f7d118a9 1183\f
/* Register the cgraph hooks that keep funct_state summaries consistent
   across node removal, clone duplication and late function insertion.
   Idempotent: guarded by init_p so repeated calls (from generate_summary
   and read_summary) register only once.  */
415309e2 1184void
 1185pass_ipa_pure_const::
7bfefa9d 1186register_hooks (void)
f7d118a9 1187{
7bfefa9d 1188 if (init_p)
 1189 return;
 1190
 1191 init_p = true;
f7d118a9 1192
86844d6c 1193 node_removal_hook_holder =
35ee1c66 1194 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
86844d6c 1195 node_duplication_hook_holder =
35ee1c66 1196 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
50828ed8 1197 function_insertion_hook_holder =
35ee1c66 1198 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
7bfefa9d 1199}
1200
1201
 1202/* Analyze each function in the cgraph to see if it is locally PURE or
 1203 CONST. */
 1204
48e1416a 1205static void
80880eb6 1206pure_const_generate_summary (void)
7bfefa9d 1207{
 1208 struct cgraph_node *node;
 1209
415309e2 1210 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
 1211 pass->register_hooks ();
7bfefa9d 1212
48e1416a 1213 /* Process all of the functions.
f7d118a9 1214
415d1b9a 1215 We process AVAIL_INTERPOSABLE functions. We can not use the results
ef378c27 1216 by default, but the info can be used at LTO with -fwhole-program or
0a10fd82 1217 when function got cloned and the clone is AVAILABLE. */
ef378c27 1218
   /* Each analyze_function result is heap-allocated; set_function_state
      takes ownership.  */
7c455d87 1219 FOR_EACH_DEFINED_FUNCTION (node)
8b4ee73c 1220 if (opt_for_fn (node->decl, flag_ipa_pure_const))
b5cebd44 1221 set_function_state (node, analyze_function (node, true));
cb886925 1222}
1223
7bfefa9d 1224
 1225/* Serialize the ipa info for lto. */
 1226
 1227static void
eab36a5a 1228pure_const_write_summary (void)
7bfefa9d 1229{
 1230 struct cgraph_node *node;
 1231 struct lto_simple_output_block *ob
 1232 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
 1233 unsigned int count = 0;
eab36a5a 1234 lto_symtab_encoder_iterator lsei;
 1235 lto_symtab_encoder_t encoder;
7bfefa9d 1236
eab36a5a 1237 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
 1238
   /* First pass over the partition only counts the records so the count
      can be streamed ahead of them.  */
 1239 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
 1240 lsei_next_function_in_partition (&lsei))
7bfefa9d 1241 {
eab36a5a 1242 node = lsei_cgraph_node (lsei);
02774f2d 1243 if (node->definition && has_function_state (node))
7bfefa9d 1244 count++;
 1245 }
48e1416a 1246
7f385784 1247 streamer_write_uhwi_stream (ob->main_stream, count);
48e1416a 1248
7bfefa9d 1249 /* Process all of the functions. */
eab36a5a 1250 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
 1251 lsei_next_function_in_partition (&lsei))
7bfefa9d 1252 {
eab36a5a 1253 node = lsei_cgraph_node (lsei);
02774f2d 1254 if (node->definition && has_function_state (node))
7bfefa9d 1255 {
30baba90 1256 struct bitpack_d bp;
7bfefa9d 1257 funct_state fs;
 1258 int node_ref;
70225339 1259 lto_symtab_encoder_t encoder;
48e1416a 1260
7bfefa9d 1261 fs = get_function_state (node);
 1262
70225339 1263 encoder = ob->decl_state->symtab_node_encoder;
02774f2d 1264 node_ref = lto_symtab_encoder_encode (encoder, node);
7f385784 1265 streamer_write_uhwi_stream (ob->main_stream, node_ref);
48e1416a 1266
7bfefa9d 1267 /* Note that flags will need to be read in the opposite
 1268 order as we are pushing the bitflags into FLAGS. */
   /* Keep this pack list in sync with the unpack list in
      pure_const_read_summary (same fields, same textual sequence).  */
30baba90 1269 bp = bitpack_create (ob->main_stream);
 1270 bp_pack_value (&bp, fs->pure_const_state, 2);
 1271 bp_pack_value (&bp, fs->state_previously_known, 2);
 1272 bp_pack_value (&bp, fs->looping_previously_known, 1);
 1273 bp_pack_value (&bp, fs->looping, 1);
 1274 bp_pack_value (&bp, fs->can_throw, 1);
04c849b3 1275 bp_pack_value (&bp, fs->can_free, 1);
bd5ef087 1276 bp_pack_value (&bp, fs->malloc_state, 2);
7f385784 1277 streamer_write_bitpack (&bp);
7bfefa9d 1278 }
 1279 }
 1280
 1281 lto_destroy_simple_output_block (ob);
 1282}
1283
1284
 1285/* Deserialize the ipa info for lto. */
 1286
48e1416a 1287static void
7bfefa9d 1288pure_const_read_summary (void)
 1289{
 1290 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
 1291 struct lto_file_decl_data *file_data;
 1292 unsigned int j = 0;
 1293
415309e2 1294 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
 1295 pass->register_hooks ();
 1296
7bfefa9d 1297 while ((file_data = file_data_vec[j++]))
 1298 {
 1299 const char *data;
 1300 size_t len;
 1301 struct lto_input_block *ib
48e1416a 1302 = lto_create_simple_input_block (file_data,
 1303 LTO_section_ipa_pure_const,
7bfefa9d 1304 &data, &len);
 1305 if (ib)
 1306 {
 1307 unsigned int i;
7f385784 1308 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1309
 1310 for (i = 0; i < count; i++)
 1311 {
 1312 unsigned int index;
 1313 struct cgraph_node *node;
30baba90 1314 struct bitpack_d bp;
7bfefa9d 1315 funct_state fs;
70225339 1316 lto_symtab_encoder_t encoder;
7bfefa9d 1317
   /* State is allocated here; ownership moves to the summary map
      via set_function_state below.  */
 1318 fs = XCNEW (struct funct_state_d);
7f385784 1319 index = streamer_read_uhwi (ib);
70225339 1320 encoder = file_data->symtab_node_encoder;
415d1b9a 1321 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
 1322 index));
7bfefa9d 1323 set_function_state (node, fs);
 1324
 1325 /* Note that the flags must be read in the opposite
 1326 order in which they were written (the bitflags were
 1327 pushed into FLAGS). */
   /* NOTE(review): the fields below are in fact consumed in the
      same textual sequence they are packed in
      pure_const_write_summary -- keep the two lists in sync.  */
7f385784 1328 bp = streamer_read_bitpack (ib);
7bfefa9d 1329 fs->pure_const_state
30baba90 1330 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1331 fs->state_previously_known
30baba90 1332 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
 1333 fs->looping_previously_known = bp_unpack_value (&bp, 1);
 1334 fs->looping = bp_unpack_value (&bp, 1);
 1335 fs->can_throw = bp_unpack_value (&bp, 1);
04c849b3 1336 fs->can_free = bp_unpack_value (&bp, 1);
bd5ef087 1337 fs->malloc_state
 1338 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
 1339
fc94a528 1340 if (dump_file)
 1341 {
02774f2d 1342 int flags = flags_from_decl_or_type (node->decl);
0e388735 1343 fprintf (dump_file, "Read info for %s ", node->dump_name ());
fc94a528 1344 if (flags & ECF_CONST)
 1345 fprintf (dump_file, " const");
 1346 if (flags & ECF_PURE)
 1347 fprintf (dump_file, " pure");
 1348 if (flags & ECF_NOTHROW)
 1349 fprintf (dump_file, " nothrow");
 1350 fprintf (dump_file, "\n pure const state: %s\n",
 1351 pure_const_names[fs->pure_const_state]);
 1352 fprintf (dump_file, " previously known state: %s\n",
8b4ee73c 1353 pure_const_names[fs->state_previously_known]);
fc94a528 1354 if (fs->looping)
 1355 fprintf (dump_file," function is locally looping\n");
 1356 if (fs->looping_previously_known)
 1357 fprintf (dump_file," function is previously known looping\n");
 1358 if (fs->can_throw)
 1359 fprintf (dump_file," function is locally throwing\n");
04c849b3 1360 if (fs->can_free)
 1361 fprintf (dump_file," function can locally free\n");
bd5ef087 1362 fprintf (dump_file, "\n malloc state: %s\n",
 1363 malloc_state_names[fs->malloc_state]);
fc94a528 1364 }
7bfefa9d 1365 }
 1366
48e1416a 1367 lto_destroy_simple_input_block (file_data,
 1368 LTO_section_ipa_pure_const,
7bfefa9d 1369 ib, data, len);
 1370 }
 1371 }
 1372}
1373
355c1f40 1374/* We only propagate across edges that can throw externally and their callee
 1375 is not interposable. */
   /* Returning true means edge E is IGNORED by nothrow propagation
      (it cannot introduce a throw into the caller).  */
7bfefa9d 1376
17b28e52 1377static bool
355c1f40 1378ignore_edge_for_nothrow (struct cgraph_edge *e)
17b28e52 1379{
355c1f40 1380 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
 1381 return true;
 1382
 1383 enum availability avail;
8b4ee73c 1384 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
 1385 e->caller);
ace7bf06 1386 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
 1387 return true;
   /* With non-call exceptions an interposed, semantically-equivalent body
      may still throw, so only ignore the edge when the callee binds to
      the current definition.  */
 1388 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
 1389 && !e->callee->binds_to_current_def_p (e->caller);
17b28e52 1390}
1391
b2c2e188 1392/* Return true if NODE is self recursive function.
e9de52cc 1393 Indirectly recursive functions appears as non-trivial strongly
 1394 connected components, so we need to care about self recursion
 1395 only. */
a1e88032 1396
 1397static bool
 1398self_recursive_p (struct cgraph_node *node)
 1399{
 1400 struct cgraph_edge *e;
   /* function_symbol () resolves through aliases/thunks, so a call via an
      alias of NODE also counts as self recursion.  */
 1401 for (e = node->callees; e; e = e->next_callee)
415d1b9a 1402 if (e->callee->function_symbol () == node)
a1e88032 1403 return true;
 1404 return false;
 1405}
1406
366970c6 1407/* Return true if N is cdtor that is not const or pure. In this case we may
 1408 need to remove unreachable function if it is marked const/pure. */
   /* Used as a call_for_symbol_and_aliases callback from
      propagate_pure_const; the void* data argument is unused.  */
 1409
 1410static bool
 1411cdtor_p (cgraph_node *n, void *)
 1412{
 1413 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
98b8f2a5 1414 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
 1415 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
366970c6 1416 return false;
 1417}
1418
d2c6c2b4 1419/* We only propagate across edges with non-interposable callee. */
   /* Passed to ipa_reduced_postorder as the edge-ignore callback in
      propagate_pure_const; returning true removes the edge from the
      reduced graph.  */
 1420
 1421static bool
 1422ignore_edge_for_pure_const (struct cgraph_edge *e)
 1423{
 1424 enum availability avail;
8b4ee73c 1425 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
d2c6c2b4 1426 return (avail <= AVAIL_INTERPOSABLE);
 1427}
1428
1429
c0240443 1430/* Produce transitive closure over the callgraph and compute pure/const
 1431 attributes. */
   /* Returns true when a static constructor/destructor was promoted to
      const/pure (see cdtor_p), in which case the caller may need to remove
      functions that became unreachable.  */
023a28e1 1432
366970c6 1433static bool
c0240443 1434propagate_pure_const (void)
cb886925 1435{
 1436 struct cgraph_node *node;
 1437 struct cgraph_node *w;
 1438 struct cgraph_node **order =
35ee1c66 1439 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
cb886925 1440 int order_pos;
 1441 int i;
 1442 struct ipa_dfs_info * w_info;
366970c6 1443 bool remove_p = false;
aaa36a78 1444 bool has_cdtor;
cb886925 1445
d2c6c2b4 1446 order_pos = ipa_reduced_postorder (order, true, false,
 1447 ignore_edge_for_pure_const);
f7d118a9 1448 if (dump_file)
 1449 {
415d1b9a 1450 cgraph_node::dump_cgraph (dump_file);
48669653 1451 ipa_print_order (dump_file, "reduced", order, order_pos);
f7d118a9 1452 }
 1453
9d75589a 1454 /* Propagate the local information through the call graph to produce
f7d118a9 1455 the global information. All the nodes within a cycle will have
 1456 the same info so we collapse cycles first. Then we can do the
 1457 propagation in one pass from the leaves to the roots. */
 1458 for (i = 0; i < order_pos; i++ )
 1459 {
 1460 enum pure_const_state_e pure_const_state = IPA_CONST;
9c2a0c05 1461 bool looping = false;
54157bf1 1462 int count = 0;
f7d118a9 1463 node = order[i];
 1464
   /* NOTE(review): aliases are skipped during propagation; their state is
      left as computed locally.  */
02774f2d 1465 if (node->alias)
8c1fce46 1466 continue;
 1467
fc94a528 1468 if (dump_file && (dump_flags & TDF_DETAILS))
 1469 fprintf (dump_file, "Starting cycle\n");
 1470
f7d118a9 1471 /* Find the worst state for any node in the cycle. */
 1472 w = node;
023a28e1 1473 while (w && pure_const_state != IPA_NEITHER)
f7d118a9 1474 {
b5cebd44 1475 struct cgraph_edge *e;
023a28e1 1476 struct cgraph_edge *ie;
 1477 int i;
51ce5652 1478 struct ipa_ref *ref = NULL;
023a28e1 1479
f7d118a9 1480 funct_state w_l = get_function_state (w);
fc94a528 1481 if (dump_file && (dump_flags & TDF_DETAILS))
0e388735 1482 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
 1483 w->dump_name (),
fc94a528 1484 pure_const_names[w_l->pure_const_state],
 1485 w_l->looping);
f7d118a9 1486
8b4ee73c 1487 /* First merge in function body properties.
 1488 We are safe to pass NULL as FROM and TO because we will take care
 1489 of possible interposition when walking callees. */
023a28e1 1490 worse_state (&pure_const_state, &looping,
8b4ee73c 1491 w_l->pure_const_state, w_l->looping,
 1492 NULL, NULL);
023a28e1 1493 if (pure_const_state == IPA_NEITHER)
 1494 break;
 1495
b5cebd44 1496 count++;
 1497
023a28e1 1498 /* We consider recursive cycles as possibly infinite.
 1499 This might be relaxed since infinite recursion leads to stack
 1500 overflow. */
b5cebd44 1501 if (count > 1)
 1502 looping = true;
48e1416a 1503
023a28e1 1504 /* Now walk the edges and merge in callee properties. */
d2c6c2b4 1505 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
 1506 e = e->next_callee)
f7d118a9 1507 {
b2c2e188 1508 enum availability avail;
cfd85d03 1509 struct cgraph_node *y = e->callee->
8b4ee73c 1510 function_or_virtual_thunk_symbol (&avail,
 1511 e->caller);
fc94a528 1512 enum pure_const_state_e edge_state = IPA_CONST;
 1513 bool edge_looping = false;
54157bf1 1514
fc94a528 1515 if (dump_file && (dump_flags & TDF_DETAILS))
 1516 {
0e388735 1517 fprintf (dump_file, " Call to %s",
 1518 e->callee->dump_name ());
fc94a528 1519 }
415d1b9a 1520 if (avail > AVAIL_INTERPOSABLE)
f7d118a9 1521 {
b5cebd44 1522 funct_state y_l = get_function_state (y);
fc94a528 1523 if (dump_file && (dump_flags & TDF_DETAILS))
 1524 {
 1525 fprintf (dump_file,
 1526 " state:%s looping:%i\n",
 1527 pure_const_names[y_l->pure_const_state],
 1528 y_l->looping);
 1529 }
4c170941 1530 if (y_l->pure_const_state > IPA_PURE
35ee1c66 1531 && e->cannot_lead_to_return_p ())
fc94a528 1532 {
 1533 if (dump_file && (dump_flags & TDF_DETAILS))
023a28e1 1534 fprintf (dump_file,
 1535 " Ignoring side effects"
 1536 " -> pure, looping\n");
fc94a528 1537 edge_state = IPA_PURE;
 1538 edge_looping = true;
 1539 }
 1540 else
 1541 {
 1542 edge_state = y_l->pure_const_state;
 1543 edge_looping = y_l->looping;
 1544 }
f7d118a9 1545 }
a53e7471 1546 else if (special_builtin_state (&edge_state, &edge_looping,
02774f2d 1547 y->decl))
7dd42908 1548 ;
ef378c27 1549 else
023a28e1 1550 state_from_flags (&edge_state, &edge_looping,
02774f2d 1551 flags_from_decl_or_type (y->decl),
35ee1c66 1552 e->cannot_lead_to_return_p ());
023a28e1 1553
 1554 /* Merge the results with what we already know. */
 1555 better_state (&edge_state, &edge_looping,
 1556 w_l->state_previously_known,
 1557 w_l->looping_previously_known);
 1558 worse_state (&pure_const_state, &looping,
8b4ee73c 1559 edge_state, edge_looping, e->caller, e->callee);
023a28e1 1560 if (pure_const_state == IPA_NEITHER)
 1561 break;
 1562 }
ef378c27 1563
023a28e1 1564 /* Now process the indirect call. */
d2c6c2b4 1565 for (ie = w->indirect_calls;
 1566 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
023a28e1 1567 {
 1568 enum pure_const_state_e edge_state = IPA_CONST;
 1569 bool edge_looping = false;
4c170941 1570
023a28e1 1571 if (dump_file && (dump_flags & TDF_DETAILS))
 1572 fprintf (dump_file, " Indirect call");
 1573 state_from_flags (&edge_state, &edge_looping,
 1574 ie->indirect_info->ecf_flags,
35ee1c66 1575 ie->cannot_lead_to_return_p ());
023a28e1 1576 /* Merge the results with what we already know. */
 1577 better_state (&edge_state, &edge_looping,
 1578 w_l->state_previously_known,
 1579 w_l->looping_previously_known);
 1580 worse_state (&pure_const_state, &looping,
8b4ee73c 1581 edge_state, edge_looping, NULL, NULL);
fc94a528 1582 if (pure_const_state == IPA_NEITHER)
 1583 break;
f7d118a9 1584 }
023a28e1 1585
 1586 /* And finally all loads and stores. */
d2c6c2b4 1587 for (i = 0; w->iterate_reference (i, ref)
 1588 && pure_const_state != IPA_NEITHER; i++)
023a28e1 1589 {
 1590 enum pure_const_state_e ref_state = IPA_CONST;
 1591 bool ref_looping = false;
 1592 switch (ref->use)
 1593 {
 1594 case IPA_REF_LOAD:
 1595 /* readonly reads are safe. */
51ce5652 1596 if (TREE_READONLY (ref->referred->decl))
023a28e1 1597 break;
 1598 if (dump_file && (dump_flags & TDF_DETAILS))
 1599 fprintf (dump_file, " nonreadonly global var read\n");
 1600 ref_state = IPA_PURE;
 1601 break;
 1602 case IPA_REF_STORE:
51ce5652 1603 if (ref->cannot_lead_to_return ())
023a28e1 1604 break;
 1605 ref_state = IPA_NEITHER;
 1606 if (dump_file && (dump_flags & TDF_DETAILS))
 1607 fprintf (dump_file, " global var write\n");
 1608 break;
 1609 case IPA_REF_ADDR:
058a1b7a 1610 case IPA_REF_CHKP:
023a28e1 1611 break;
5cb7c8cf 1612 default:
 1613 gcc_unreachable ();
023a28e1 1614 }
 1615 better_state (&ref_state, &ref_looping,
 1616 w_l->state_previously_known,
 1617 w_l->looping_previously_known);
 1618 worse_state (&pure_const_state, &looping,
8b4ee73c 1619 ref_state, ref_looping, NULL, NULL);
023a28e1 1620 if (pure_const_state == IPA_NEITHER)
 1621 break;
 1622 }
02774f2d 1623 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1624 w = w_info->next_cycle;
 1625 }
fc94a528 1626 if (dump_file && (dump_flags & TDF_DETAILS))
 1627 fprintf (dump_file, "Result %s looping %i\n",
 1628 pure_const_names [pure_const_state],
 1629 looping);
f7d118a9 1630
04c849b3 1631 /* Find the worst state of can_free for any node in the cycle. */
 1632 bool can_free = false;
 1633 w = node;
 1634 while (w && !can_free)
 1635 {
 1636 struct cgraph_edge *e;
 1637 funct_state w_l = get_function_state (w);
 1638
 1639 if (w_l->can_free
 1640 || w->get_availability () == AVAIL_INTERPOSABLE
 1641 || w->indirect_calls)
 1642 can_free = true;
 1643
 1644 for (e = w->callees; e && !can_free; e = e->next_callee)
 1645 {
 1646 enum availability avail;
cfd85d03 1647 struct cgraph_node *y = e->callee->
8b4ee73c 1648 function_or_virtual_thunk_symbol (&avail,
 1649 e->caller);
04c849b3 1650
 1651 if (avail > AVAIL_INTERPOSABLE)
 1652 can_free = get_function_state (y)->can_free;
 1653 else
 1654 can_free = true;
 1655 }
 1656 w_info = (struct ipa_dfs_info *) w->aux;
 1657 w = w_info->next_cycle;
 1658 }
 1659
f7d118a9 1660 /* Copy back the region's pure_const_state which is shared by
 1661 all nodes in the region. */
 1662 w = node;
 1663 while (w)
 1664 {
 1665 funct_state w_l = get_function_state (w);
b5cebd44 1666 enum pure_const_state_e this_state = pure_const_state;
 1667 bool this_looping = looping;
f7d118a9 1668
04c849b3 1669 w_l->can_free = can_free;
 1670 w->nonfreeing_fn = !can_free;
 1671 if (!can_free && dump_file)
 1672 fprintf (dump_file, "Function found not to call free: %s\n",
 1673 w->name ());
 1674
df9b545b 1675 if (w_l->state_previously_known != IPA_NEITHER
 1676 && this_state > w_l->state_previously_known)
4c170941 1677 {
 1678 this_state = w_l->state_previously_known;
d2c6c2b4 1679 if (this_state == IPA_NEITHER)
 1680 this_looping = w_l->looping_previously_known;
4c170941 1681 }
   /* Self recursion counts as possible looping even when the cycle
      analysis above did not flag it.  */
a1e88032 1682 if (!this_looping && self_recursive_p (w))
 1683 this_looping = true;
df9b545b 1684 if (!w_l->looping_previously_known)
 1685 this_looping = false;
cb886925 1686
b5cebd44 1687 /* All nodes within a cycle share the same info. */
 1688 w_l->pure_const_state = this_state;
 1689 w_l->looping = this_looping;
 1690
ce2d198d 1691 /* Inline clones share declaration with their offline copies;
 1692 do not modify their declarations since the offline copy may
 1693 be different. */
 1694 if (!w->global.inlined_to)
 1695 switch (this_state)
 1696 {
 1697 case IPA_CONST:
 1698 if (!TREE_READONLY (w->decl))
 1699 {
 1700 warn_function_const (w->decl, !this_looping);
 1701 if (dump_file)
 1702 fprintf (dump_file, "Function found to be %sconst: %s\n",
 1703 this_looping ? "looping " : "",
 1704 w->name ());
 1705 }
aaa36a78 1706 /* Turning constructor or destructor to non-looping const/pure
 1707 enables us to possibly remove the function completely. */
 1708 if (this_looping)
 1709 has_cdtor = false;
 1710 else
 1711 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
 1712 NULL, true);
 1713 if (w->set_const_flag (true, this_looping))
 1714 {
 1715 if (dump_file)
 1716 fprintf (dump_file,
 1717 "Declaration updated to be %sconst: %s\n",
 1718 this_looping ? "looping " : "",
 1719 w->name ());
 1720 remove_p |= has_cdtor;
 1721 }
ce2d198d 1722 break;
48e1416a 1723
ce2d198d 1724 case IPA_PURE:
 1725 if (!DECL_PURE_P (w->decl))
 1726 {
 1727 warn_function_pure (w->decl, !this_looping);
 1728 if (dump_file)
 1729 fprintf (dump_file, "Function found to be %spure: %s\n",
 1730 this_looping ? "looping " : "",
 1731 w->name ());
 1732 }
aaa36a78 1733 if (this_looping)
 1734 has_cdtor = false;
 1735 else
 1736 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
 1737 NULL, true);
 1738 if (w->set_pure_flag (true, this_looping))
 1739 {
 1740 if (dump_file)
 1741 fprintf (dump_file,
 1742 "Declaration updated to be %spure: %s\n",
 1743 this_looping ? "looping " : "",
 1744 w->name ());
 1745 remove_p |= has_cdtor;
 1746 }
ce2d198d 1747 break;
48e1416a 1748
ce2d198d 1749 default:
 1750 break;
 1751 }
02774f2d 1752 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1753 w = w_info->next_cycle;
 1754 }
 1755 }
 1756
7771d558 1757 ipa_free_postorder_info ();
c0240443 1758 free (order);
366970c6 1759 return remove_p;
c0240443 1760}
1761
 1762/* Produce transitive closure over the callgraph and compute nothrow
 1763 attributes. */
 1764
 1765static void
 1766propagate_nothrow (void)
 1767{
 1768 struct cgraph_node *node;
 1769 struct cgraph_node *w;
 1770 struct cgraph_node **order =
35ee1c66 1771 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
c0240443 1772 int order_pos;
 1773 int i;
 1774 struct ipa_dfs_info * w_info;
 1775
355c1f40 1776 order_pos = ipa_reduced_postorder (order, true, false,
 1777 ignore_edge_for_nothrow);
17b28e52 1778 if (dump_file)
 1779 {
415d1b9a 1780 cgraph_node::dump_cgraph (dump_file);
7771d558 1781 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1782 }
c0240443 1783
9d75589a 1784 /* Propagate the local information through the call graph to produce
17b28e52 1785 the global information. All the nodes within a cycle will have
 1786 the same info so we collapse cycles first. Then we can do the
 1787 propagation in one pass from the leaves to the roots. */
 1788 for (i = 0; i < order_pos; i++ )
 1789 {
 1790 bool can_throw = false;
 1791 node = order[i];
 1792
02774f2d 1793 if (node->alias)
8c1fce46 1794 continue;
 1795
17b28e52 1796 /* Find the worst state for any node in the cycle. */
 1797 w = node;
b2c90c54 1798 while (w && !can_throw)
17b28e52 1799 {
023a28e1 1800 struct cgraph_edge *e, *ie;
17b28e52 1801
   /* A node whose decl is already marked nothrow cannot contribute
      can_throw to the cycle.  */
355c1f40 1802 if (!TREE_NOTHROW (w->decl))
17b28e52 1803 {
355c1f40 1804 funct_state w_l = get_function_state (w);
17b28e52 1805
355c1f40 1806 if (w_l->can_throw
 1807 || w->get_availability () == AVAIL_INTERPOSABLE)
 1808 can_throw = true;
 1809
 1810 for (e = w->callees; e && !can_throw; e = e->next_callee)
17b28e52 1811 {
355c1f40 1812 enum availability avail;
 1813
 1814 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
 1815 continue;
 1816
 1817 struct cgraph_node *y = e->callee->
ace7bf06 1818 function_or_virtual_thunk_symbol (&avail,
 1819 e->caller);
17b28e52 1820
355c1f40 1821 /* We can use info about the callee only if we know it can
ace7bf06 1822 not be interposed.
 1823 When callee is compiled with non-call exceptions we also
 1824 must check that the declaration is bound to current
 1825 body as other semantically equivalent body may still
 1826 throw. */
355c1f40 1827 if (avail <= AVAIL_INTERPOSABLE
 1828 || (!TREE_NOTHROW (y->decl)
ace7bf06 1829 && (get_function_state (y)->can_throw
 1830 || (opt_for_fn (y->decl, flag_non_call_exceptions)
 1831 && !e->callee->binds_to_current_def_p (w)))))
17b28e52 1832 can_throw = true;
 1833 }
355c1f40 1834 for (ie = w->indirect_calls; ie && !can_throw;
 1835 ie = ie->next_callee)
 1836 if (ie->can_throw_external
 1837 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
 1838 can_throw = true;
17b28e52 1839 }
02774f2d 1840 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1841 w = w_info->next_cycle;
 1842 }
 1843
 1844 /* Copy back the region's nothrow state, which is shared by
 1845 all nodes in the region. */
 1846 w = node;
 1847 while (w)
 1848 {
ecfab407 1849 funct_state w_l = get_function_state (w);
02774f2d 1850 if (!can_throw && !TREE_NOTHROW (w->decl))
b5cebd44 1851 {
ce2d198d 1852 /* Inline clones share declaration with their offline copies;
 1853 do not modify their declarations since the offline copy may
 1854 be different. */
 1855 if (!w->global.inlined_to)
 1856 {
415d1b9a 1857 w->set_nothrow_flag (true);
ce2d198d 1858 if (dump_file)
 1859 fprintf (dump_file, "Function found to be nothrow: %s\n",
 1860 w->name ());
 1861 }
f7d118a9 1862 }
02774f2d 1863 else if (can_throw && !TREE_NOTHROW (w->decl))
ecfab407 1864 w_l->can_throw = true;
02774f2d 1865 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1866 w = w_info->next_cycle;
 1867 }
 1868 }
 1869
7771d558 1870 ipa_free_postorder_info ();
f7d118a9 1871 free (order);
c0240443 1872}
1873
bd5ef087 1874/* Debugging function to dump state of malloc lattice. */
   /* S is a label printed in the header (e.g. "Initial" -- see
      propagate_malloc).  No-op when DUMP_FILE is NULL.  */
 1875
 1876DEBUG_FUNCTION
 1877static void
 1878dump_malloc_lattice (FILE *dump_file, const char *s)
 1879{
 1880 if (!dump_file)
 1881 return;
 1882
 1883 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
 1884 cgraph_node *node;
 1885 FOR_EACH_FUNCTION (node)
 1886 {
   /* NOTE(review): assumes get_function_state yields a usable state for
      every function -- confirm it tolerates nodes without a computed
      summary.  */
 1887 funct_state fs = get_function_state (node);
 1888 malloc_state_e state = fs->malloc_state;
 1889 fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
 1890 }
 1891}
1892
1893/* Propagate malloc attribute across the callgraph. */
1894
1895static void
1896propagate_malloc (void)
1897{
1898 cgraph_node *node;
1899 FOR_EACH_FUNCTION (node)
1900 {
1901 if (DECL_IS_MALLOC (node->decl))
1902 if (!has_function_state (node))
1903 {
1904 funct_state l = XCNEW (struct funct_state_d);
1905 *l = varying_state;
1906 l->malloc_state = STATE_MALLOC;
1907 set_function_state (node, l);
1908 }
1909 }
1910
1911 dump_malloc_lattice (dump_file, "Initial");
1912 struct cgraph_node **order
1913 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1914 int order_pos = ipa_reverse_postorder (order);
1915 bool changed = true;
1916
1917 while (changed)
1918 {
1919 changed = false;
1920 /* Walk in postorder. */
1921 for (int i = order_pos - 1; i >= 0; --i)
1922 {
1923 cgraph_node *node = order[i];
1924 if (node->alias
1925 || !node->definition
1926 || !has_function_state (node))
1927 continue;
1928
1929 funct_state l = get_function_state (node);
1930
1931 /* FIXME: add support for indirect-calls. */
1932 if (node->indirect_calls)
1933 {
1934 l->malloc_state = STATE_MALLOC_BOTTOM;
1935 continue;
1936 }
1937
1938 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1939 {
1940 l->malloc_state = STATE_MALLOC_BOTTOM;
1941 continue;
1942 }
1943
1944 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1945 continue;
1946
1947 vec<cgraph_node *> callees = vNULL;
1948 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1949 {
1950 ipa_call_summary *es = ipa_call_summaries->get (cs);
1951 if (es && es->is_return_callee_uncaptured)
1952 callees.safe_push (cs->callee);
1953 }
1954
1955 malloc_state_e new_state = l->malloc_state;
1956 for (unsigned j = 0; j < callees.length (); j++)
1957 {
1958 cgraph_node *callee = callees[j];
1959 if (!has_function_state (callee))
1960 {
1961 new_state = STATE_MALLOC_BOTTOM;
1962 break;
1963 }
1964 malloc_state_e callee_state = get_function_state (callee)->malloc_state;
1965 if (new_state < callee_state)
1966 new_state = callee_state;
1967 }
1968 if (new_state != l->malloc_state)
1969 {
1970 changed = true;
1971 l->malloc_state = new_state;
1972 }
1973 }
1974 }
1975
1976 FOR_EACH_DEFINED_FUNCTION (node)
1977 if (has_function_state (node))
1978 {
1979 funct_state l = get_function_state (node);
1980 if (!node->alias
1981 && l->malloc_state == STATE_MALLOC
1982 && !node->global.inlined_to)
1983 {
1984 if (dump_file && (dump_flags & TDF_DETAILS))
1985 fprintf (dump_file, "Function %s found to be malloc\n",
1986 node->name ());
1987
1988 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1989 node->set_malloc_flag (true);
1990 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1991 warn_function_malloc (node->decl);
1992 }
1993 }
1994
1995 dump_malloc_lattice (dump_file, "after propagation");
1996 ipa_free_postorder_info ();
1997 free (order);
1998}
c0240443 1999
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Returns TODO_remove_functions when propagation proved some functions
   unreachable (e.g. noreturn discovery made callers dead).  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  /* The summaries are final from here on; detach the cgraph hooks that
     kept them in sync with node insertion/duplication/removal.  */
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more function to not lead to return and improve
     later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  Free every per-function lattice state and the vector
     holding them.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}
2027
2028static bool
2029gate_pure_const (void)
2030{
d1f68cd8 2031 return flag_ipa_pure_const || in_lto_p;
f7d118a9 2032}
2033
/* Construct the IPA pure/const pass.  Summary hooks are passed to the
   ipa_opt_pass_d base; the cgraph hook holders start out NULL and are
   registered lazily on first summary generation (tracked by init_p).  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
cbe8bda8 2051
/* Factory for the IPA pure/const pass; the pass manager owns the
   returned object.  */

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
2057
2c06958d 2058/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 2059
2c06958d 2060static bool
2061skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 2062{
8b4ee73c 2063 /* Because we do not schedule pass_fixup_cfg over whole program after early
2064 optimizations we must not promote functions that are called by already
2065 processed functions. */
b5cebd44 2066
2067 if (function_called_by_processed_nodes_p ())
2068 {
2069 if (dump_file)
2070 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 2071 return true;
b5cebd44 2072 }
aaa36a78 2073 /* Save some work and do not analyze functions which are interposable and
2074 do not have any non-interposable aliases. */
2075 if (node->get_availability () <= AVAIL_INTERPOSABLE
2076 && !node->has_aliases_p ())
b5cebd44 2077 {
2078 if (dump_file)
8b4ee73c 2079 fprintf (dump_file,
aaa36a78 2080 "Function is interposable; not analyzing.\n");
2c06958d 2081 return true;
b5cebd44 2082 }
2c06958d 2083 return false;
2084}
2085
2086/* Simple local pass for pure const discovery reusing the analysis from
2087 ipa_pure_const. This pass is effective when executed together with
2088 other optimization passes in early optimization pass queue. */
b5cebd44 2089
7620bc82 2090namespace {
2091
2092const pass_data pass_data_local_pure_const =
65b0537f 2093{
2094 GIMPLE_PASS, /* type */
2095 "local-pure-const", /* name */
2096 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 2097 TV_IPA_PURE_CONST, /* tv_id */
2098 0, /* properties_required */
2099 0, /* properties_provided */
2100 0, /* properties_destroyed */
2101 0, /* todo_flags_start */
2102 0, /* todo_flags_finish */
2103};
2104
/* Local pure/const discovery pass.  Clonable so it can be scheduled
   several times in the early-optimization pass queue.  */

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
2118
/* Analyze the current function FUN locally and update its declaration
   flags: noreturn, const/pure (possibly looping variants), nothrow and
   malloc.  Returns the TODO flags of execute_fixup_cfg when any flag
   changed, 0 otherwise.  Even when the function must be skipped for
   promotion (see skip_function_for_local_pure_const) the analysis still
   runs if -Wsuggest-attribute= warnings are requested.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Nothing to do: no warnings wanted and promotion is not allowed.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  A function whose exit block has no
     predecessors can never return.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  /* Promote to const or pure according to the computed lattice state;
     also notice when an existing looping-const/pure function turned out
     to be non-looping.  */
  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  /* Nothrow promotion is done even for SKIP functions.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* Malloc promotion: mark the decl and suggest the attribute when
     -Wsuggest-attribute=malloc is enabled.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  /* analyze_function transferred ownership of L to us.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2238
7620bc82 2239} // anon namespace
2240
/* Factory for the local pure/const pass; the pass manager owns the
   returned object.  */

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
64641360 2246
2247/* Emit noreturn warnings. */
2248
7620bc82 2249namespace {
2250
2251const pass_data pass_data_warn_function_noreturn =
64641360 2252{
2253 GIMPLE_PASS, /* type */
2254 "*warn_function_noreturn", /* name */
2255 OPTGROUP_NONE, /* optinfo_flags */
64641360 2256 TV_NONE, /* tv_id */
2257 PROP_cfg, /* properties_required */
2258 0, /* properties_provided */
2259 0, /* properties_destroyed */
2260 0, /* todo_flags_start */
2261 0, /* todo_flags_finish */
2262};
2263
/* Warning-only pass: suggest the noreturn attribute for functions whose
   exit block is unreachable and that are not already marked noreturn.
   Gated on -Wsuggest-attribute=noreturn; never modifies the decl.  */

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
  {
    /* TREE_THIS_VOLATILE on a FUNCTION_DECL means "noreturn"; warn only
       when it is not yet set and the exit block has no predecessors.  */
    if (!TREE_THIS_VOLATILE (current_function_decl)
	&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
      warn_function_noreturn (current_function_decl);
    return 0;
  }

}; // class pass_warn_function_noreturn
2282
7620bc82 2283} // anon namespace
2284
/* Factory for the noreturn-warning pass; the pass manager owns the
   returned object.  */

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
73594827 2290
2291/* Simple local pass for pure const discovery reusing the analysis from
2292 ipa_pure_const. This pass is effective when executed together with
2293 other optimization passes in early optimization pass queue. */
2294
7620bc82 2295namespace {
2296
/* Pass descriptor for the local nothrow-discovery pass.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2309
/* Local nothrow discovery pass.  Clonable so it can be scheduled more
   than once; gated on any optimization being enabled.  */

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
2323
2324unsigned int
2325pass_nothrow::execute (function *)
2326{
2327 struct cgraph_node *node;
2328 basic_block this_block;
2329
2330 if (TREE_NOTHROW (current_function_decl))
2331 return 0;
2332
2333 node = cgraph_node::get (current_function_decl);
2334
2335 /* We run during lowering, we can not really use availability yet. */
2336 if (cgraph_node::get (current_function_decl)->get_availability ()
2337 <= AVAIL_INTERPOSABLE)
2338 {
2339 if (dump_file)
2340 fprintf (dump_file, "Function is interposable;"
2341 " not analyzing.\n");
2342 return true;
2343 }
2344
2345 FOR_EACH_BB_FN (this_block, cfun)
2346 {
2347 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2348 !gsi_end_p (gsi);
2349 gsi_next (&gsi))
2350 if (stmt_can_throw_external (gsi_stmt (gsi)))
2351 {
2352 if (is_gimple_call (gsi_stmt (gsi)))
2353 {
2354 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2355 if (callee_t && recursive_call_p (current_function_decl,
2356 callee_t))
2357 continue;
2358 }
2359
2360 if (dump_file)
2361 {
2362 fprintf (dump_file, "Statement can throw: ");
1ffa4346 2363 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
73594827 2364 }
2365 return 0;
2366 }
2367 }
2368
2369 node->set_nothrow_flag (true);
fd499010 2370
2371 bool cfg_changed = false;
2372 if (self_recursive_p (node))
2373 FOR_EACH_BB_FN (this_block, cfun)
2374 if (gimple *g = last_stmt (this_block))
2375 if (is_gimple_call (g))
2376 {
2377 tree callee_t = gimple_call_fndecl (g);
2378 if (callee_t
2379 && recursive_call_p (current_function_decl, callee_t)
2380 && maybe_clean_eh_stmt (g)
2381 && gimple_purge_dead_eh_edges (this_block))
2382 cfg_changed = true;
2383 }
2384
73594827 2385 if (dump_file)
2386 fprintf (dump_file, "Function found to be nothrow: %s\n",
2387 current_function_name ());
fd499010 2388 return cfg_changed ? TODO_cleanup_cfg : 0;
73594827 2389}
2390
7620bc82 2391} // anon namespace
2392
/* Factory for the nothrow discovery pass; the pass manager owns the
   returned object.  */

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}