]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-pure-const.c
2019-01-09 Thomas Koenig <tkoenig@gcc.gnu.org>
[thirdparty/gcc.git] / gcc / ipa-pure-const.c
CommitLineData
f7d118a9 1/* Callgraph based analysis of static variables.
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
9ef16211 37#include "backend.h"
7c29e30e 38#include "target.h"
f7d118a9 39#include "tree.h"
9ef16211 40#include "gimple.h"
7c29e30e 41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
9ed99284 45#include "calls.h"
94ea8568 46#include "cfganal.h"
bc61cadb 47#include "tree-eh.h"
dcf1a1ec 48#include "gimple-iterator.h"
49#include "gimple-walk.h"
073c1fd5 50#include "tree-cfg.h"
05d9c18a 51#include "tree-ssa-loop-niter.h"
f7d118a9 52#include "langhooks.h"
f7d118a9 53#include "ipa-utils.h"
ce084dfc 54#include "gimple-pretty-print.h"
c9263b6a 55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
2c06958d 57#include "intl.h"
58#include "opts.h"
bd5ef087 59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
62#include "ipa-prop.h"
63#include "ipa-fnsummary.h"
f7d118a9 64
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The numeric order of the enumerators matters: the
   merge helpers (better_state/worse_state) rely on
   IPA_CONST < IPA_PURE < IPA_NEITHER.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Dump names for pure_const_state_e, indexed by the enumerator.
   Let the compiler count the elements so the array can never get out
   of sync with the enum.  */
static const char *pure_const_names[] = {"const", "pure", "neither"};

/* Lattice used while discovering the malloc attribute; names follow
   the usual dataflow convention (top = not yet constrained,
   bottom = definitely not malloc).  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Dump names for malloc_state_e, indexed by the enumerator.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
fc94a528 85
/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  /* Default-construct to the fully pessimistic state (neither const
     nor pure, possibly looping, throwing and freeing), so analysis can
     only ever improve the information.  */
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  /* Copy constructor; memberwise copy of all lattice fields.  */
  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  Result of the analysis for this function.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping flag as previously known (e.g. from user attributes).  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception visible to its callers.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  /* Current value in the malloc-attribute discovery lattice.  */
  enum malloc_state_e malloc_state;
};
124
/* Shorthand pointer type used throughout this file.  */
typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Function summary mapping each cgraph node to its funct_state_d.
   The insert/duplicate virtuals keep the summary consistent when new
   nodes appear or existing ones are cloned.  */
class funct_state_summary_t: public function_summary <funct_state_d *>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    function_summary <funct_state_d *> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};

/* Singleton summary instance; NULL until the pass sets it up
   (presumably in register_hooks -- body not visible here).  */
static funct_state_summary_t *funct_state_summaries = NULL;
cb886925 144
/* Forward declaration; defined later in this file.  */
static bool gate_pure_const (void);

namespace {

/* Pass metadata for the IPA pure-const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The IPA pure-const pass object.  Gating and hook registration are
   shared with the local (non-IPA) variant via gate_pure_const and
   register_hooks.  */
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  /* Register the cgraph hooks that keep funct_state_summaries alive.  */
  void register_hooks (void);

private:
  /* True once register_hooks has run, so hooks are only installed once.  */
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
178
2c06958d 179/* Try to guess if function body will always be visible to compiler
180 when compiling the call and whether compiler will be able
181 to propagate the information by itself. */
182
183static bool
184function_always_visible_to_compiler_p (tree decl)
185{
60722a03 186 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
187 || DECL_COMDAT (decl));
2c06958d 188}
189
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function, so each warning is emitted at most once per decl.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do when the user did not enable the warning.  */
  if (!option_enabled (option, &global_options))
    return warned_about;
  /* Volatile (noreturn) functions and finite functions whose body the
     compiler will always see gain nothing from the attribute.  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily create the per-option set and dedupe by decl.  */
  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
220
221/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
222 is true if the function is known to be finite. */
223
224static void
225warn_function_pure (tree decl, bool known_finite)
226{
2acafe26 227 /* Declaring a void function pure makes no sense and is diagnosed
228 by -Wattributes because calling it would have no effect. */
229 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
230 return;
2c06958d 231
2acafe26 232 static hash_set<tree> *warned_about;
233 warned_about
2c06958d 234 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
235 known_finite, warned_about, "pure");
236}
237
238/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
239 is true if the function is known to be finite. */
240
241static void
242warn_function_const (tree decl, bool known_finite)
243{
2acafe26 244 /* Declaring a void function const makes no sense is diagnosed
245 by -Wattributes because calling it would have no effect. */
246 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
247 return;
248
431205b7 249 static hash_set<tree> *warned_about;
2acafe26 250 warned_about
2c06958d 251 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
252 known_finite, warned_about, "const");
253}
43d60d64 254
bd5ef087 255/* Emit suggestion about __attribute__((malloc)) for DECL. */
256
257static void
258warn_function_malloc (tree decl)
259{
260 static hash_set<tree> *warned_about;
261 warned_about
262 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
9deb0d09 263 true, warned_about, "malloc");
bd5ef087 264}
265
266/* Emit suggestion about __attribute__((noreturn)) for DECL. */
267
64641360 268static void
43d60d64 269warn_function_noreturn (tree decl)
270{
313dfc4e 271 tree original_decl = decl;
272
431205b7 273 static hash_set<tree> *warned_about;
08c6cbd2 274 if (!lang_hooks.missing_noreturn_ok_p (decl)
275 && targetm.warn_func_return (decl))
43d60d64 276 warned_about
313dfc4e 277 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
43d60d64 278 true, warned_about, "noreturn");
279}
9631926a 280
60722a03 281void
282warn_function_cold (tree decl)
283{
284 tree original_decl = decl;
285
60722a03 286 static hash_set<tree> *warned_about;
287 warned_about
288 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
289 true, warned_about, "cold");
290}
291
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   Lower LOCAL's pure_const_state accordingly.  In IPA mode actual
   loads/stores are deferred to propagation time.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read: const functions may not read global
	     memory, so drop CONST to PURE (PURE and NEITHER stay).  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read; same CONST -> PURE demotion as above.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
367
f7d118a9 368
/* Check to see if the memory reference T (a use, or a definition when
   CHECKING_WRITE is true) is legal in a function that is either pure or
   const.  Unlike check_decl this handles arbitrary operands, looking
   through to the base address to classify indirect references.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* Dereference of a pointer that provably cannot point to global
	 memory: harmless for const/pure purposes.  */
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      /* A read of possibly-global memory demotes CONST to PURE.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
407
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN is true when the call can never return
   normally; such side effects are unobservable, so the call may be
   treated as looping pure.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      /* Side effects before a guaranteed non-return are not observable;
	 mark as pure but force looping so DCE cannot drop the call.  */
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
448
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return, so when leaving
   IPA_NEITHER the looping flag is taken from the better variant
   rather than min-merged.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      /* STATE2 is strictly better (lower in the lattice).  */
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
469
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   FROM is the referring symbol and TO the callee symbol (either may be
   NULL when unknown); they are used to detect interposition.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  /* Pointwise maximum: both the state and the looping flag only get
     worse.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
508
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  On success store the effective state into
   *STATE and *LOOPING and return true; return false when CALLEE gets no
   special treatment.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins only touch the local frame / EH machinery, so a
	 function using them can still be const.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      /* Prefetch has no observable effect but must not be removed,
	 hence const + looping.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
545
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  Updates LOCAL in place; IPA selects
   whole-program mode, where direct calls are left to propagation.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  /* Evaluating a throwing operand has EH side effects even if the call
     itself is benign.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* A normal builtin that is not known to leave allocated memory
	 alone may free.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  /* Internal calls never have cgraph edges; a freeing one taints
     can_free here since propagation will not see it.  */
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      /* The call cannot lead to a normal return when it both neither
	 returns nor throws, or when it does not return and exceptions
	 are disabled.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
684
/* Wrapper around check_decl for loads in local mode; callback for
   walk_stmt_load_store_ops.  Returning false continues the walk.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}
696
/* Wrapper around check_decl for stores in local mode; callback for
   walk_stmt_load_store_ops.  Returning false continues the walk.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
708
/* Wrapper around check_decl for loads in ipa mode; callback for
   walk_stmt_load_store_ops.  Returning false continues the walk.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}
720
/* Wrapper around check_decl for stores in ipa mode; callback for
   walk_stmt_load_store_ops.  Returning false continues the walk.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
732
/* Look at the statement pointed to by iterator GSIP and figure out what
   interesting side effects it has, lowering LOCAL accordingly.  IPA
   selects whole-program mode (loads/stores deferred, clobbers kept).  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug stmts never affect code generation.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Call EH effects are handled in check_call; handle other throwing
     statements here.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
828
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.
   Returns false on the first disallowed use.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	/* Conditional: only comparisons against literal zero allowed.  */
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	/* Assignment: must be a comparison, again against zero.  */
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;  /* Debug uses are always fine.  */
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
860
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */

/* Dump REASON (under -fdump-...-details) and return false from the
   enclosing function.  Relies on a local cgraph_node * named `node'
   being in scope at every use site.  Deliberately a bare block rather
   than do { } while (0): it contains a `return' and some call sites
   follow it with no semicolon.  */
#define DUMP_AND_RETURN(reason) \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS)) \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->name()), (reason)); \
  return false; \
}
879
/* Helper for malloc_candidate_p.  Check that RETVAL, returned from
   RET_STMT in FUN, is defined only by a call, or by a phi whose non-zero
   arguments are themselves call results (recursing through nested phis).
   In IPA mode the callees need not already carry the malloc attribute;
   instead the matching call edges are flagged is_return_callee_uncaptured
   for use during propagation.
   NOTE(review): the nested-phi recursion carries no visited set, so a
   cyclic phi web could in principle recurse without bound -- confirm
   against upstream, which later threaded a `visited' bitmap through.  */
static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* The returned pointer must not escape except into comparisons.  */
  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  /* Zero arguments are fine: returning NULL is malloc-like.  */
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      /* Nested phi: recurse (see NOTE above re: cycles).  */
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa))
		  DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
961
/* Return true if function FUN is a candidate for the "malloc" attribute:
   every path into the exit block must end in a return statement whose
   value is a pointer SSA_NAME that malloc_candidate_p_1 proves to be a
   freshly allocated, uncaptured pointer.  When IPA is true, decisions
   about callees are deferred to the IPA propagation phase via call
   summaries instead of requiring DECL_IS_MALLOC locally.
   NOTE(review): relies on the DUMP_AND_RETURN macro defined earlier in
   this file, which emits a diagnostic to the dump file and returns
   false.  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* A function that never returns cannot be malloc; and without
     -fdelete-null-pointer-checks the "nonnull return" reasoning the
     malloc attribute implies is not valid.  */
  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  /* Every predecessor of the exit block must be a return of a suitable
     pointer value.  */
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      /* Recursively verify the def chain (calls and phis) of RETVAL.  */
      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}
999
18ea7971 1000#undef DUMP_AND_RETURN
cb886925 1001
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.

   Allocates and returns a fresh funct_state_d (caller owns / frees it)
   describing FN's local pure/const, looping, can_throw, can_free and
   malloc lattice values.  With IPA true, thunks and aliases are handled
   by propagation and the malloc analysis defers callee checks to call
   summaries.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start at the optimistic top of the lattice (IPA_CONST) and let
     check_stmt degrade it.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Seed the "previously known" state from declaration flags
     (const/pure/noreturn attributes already on the decl).  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      /* Virtual-offset thunks read the vtable at runtime, so they are
	 never const/pure.  */
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Scan every statement; bail out early once every tracked property
     has already hit the bottom of its lattice.  */
  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      /* Irreducible regions defeat finiteness analysis, so
		 conservatively assume the function may loop forever.  */
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      /* Use SCEV to try to prove each natural loop finite.  */
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  /* Declaration-level knowledge (attributes) can only improve what the
     body scan found.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the malloc lattice value: declared malloc wins; otherwise
     IPA mode may leave it at TOP for propagation to resolve.  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
1136
16f72bd0 1137void
1138funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
50828ed8 1139{
50828ed8 1140 /* There are some shared nodes, in particular the initializers on
1141 static declarations. We do not need to scan them more than once
1142 since all we would be interested in are the addressof
1143 operations. */
8b4ee73c 1144 if (opt_for_fn (node->decl, flag_ipa_pure_const))
86844d6c 1145 {
16f72bd0 1146 funct_state_d *a = analyze_function (node, true);
1147 new (state) funct_state_d (*a);
1148 free (a);
86844d6c 1149 }
1150}
1151
/* Called when new clone is inserted to callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  /* Copy-construct the source summary into the clone's preallocated
     slot; the clone starts with identical pure/const information.  */
  new (dst_data) funct_state_d (*src_data);
}
1161
f7d118a9 1162\f
415309e2 1163void
1164pass_ipa_pure_const::
7bfefa9d 1165register_hooks (void)
f7d118a9 1166{
7bfefa9d 1167 if (init_p)
1168 return;
1169
1170 init_p = true;
f7d118a9 1171
16f72bd0 1172 funct_state_summaries = new funct_state_summary_t (symtab);
7bfefa9d 1173}
1174
1175
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	/* analyze_function returns a heap-allocated state; copy it into
	   the summary storage via placement new, then free the copy.  */
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}
1201
7bfefa9d 1202
/* Serialize the ipa info for lto.  Writes, per defined function in the
   partition that has a summary, the node reference followed by a
   bit-packed encoding of its funct_state.  The bit layout here is a
   wire format and must stay in exact mirror-order with
   pure_const_read_summary.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1259
1260
/* Deserialize the ipa info for lto.  Reads the per-function records
   written by pure_const_write_summary from every LTO file; the
   bit-unpack order below must remain the exact mirror of the writer's
   pack order.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* A file may simply lack this section; that is not an error.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1348
355c1f40 1349/* We only propagate across edges that can throw externally and their callee
1350 is not interposable. */
7bfefa9d 1351
17b28e52 1352static bool
355c1f40 1353ignore_edge_for_nothrow (struct cgraph_edge *e)
17b28e52 1354{
355c1f40 1355 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1356 return true;
1357
1358 enum availability avail;
8b4ee73c 1359 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1360 e->caller);
ace7bf06 1361 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1362 return true;
1363 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1364 && !e->callee->binds_to_current_def_p (e->caller);
17b28e52 1365}
1366
b2c2e188 1367/* Return true if NODE is self recursive function.
e9de52cc 1368 Indirectly recursive functions appears as non-trivial strongly
1369 connected components, so we need to care about self recursion
1370 only. */
a1e88032 1371
1372static bool
1373self_recursive_p (struct cgraph_node *node)
1374{
1375 struct cgraph_edge *e;
1376 for (e = node->callees; e; e = e->next_callee)
415d1b9a 1377 if (e->callee->function_symbol () == node)
a1e88032 1378 return true;
1379 return false;
1380}
1381
366970c6 1382/* Return true if N is cdtor that is not const or pure. In this case we may
1383 need to remove unreachable function if it is marked const/pure. */
1384
1385static bool
1386cdtor_p (cgraph_node *n, void *)
1387{
1388 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
98b8f2a5 1389 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1390 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
366970c6 1391 return false;
1392}
1393
d2c6c2b4 1394/* We only propagate across edges with non-interposable callee. */
1395
1396static bool
1397ignore_edge_for_pure_const (struct cgraph_edge *e)
1398{
1399 enum availability avail;
8b4ee73c 1400 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
d2c6c2b4 1401 return (avail <= AVAIL_INTERPOSABLE);
1402}
1403
1404
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.

   Works on the DAG of strongly connected components (cycles) produced
   by ipa_reduced_postorder: for each cycle it computes the worst
   pure/const state and the worst can_free state over all members and
   their callees, then writes the shared result back to every member
   and updates the decls.  Returns true when some const/pure cdtor was
   discovered, meaning the caller may be able to remove now-unreachable
   functions.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  Members of a
	 cycle are chained through ipa_dfs_info::next_cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  /* Non-interposable callee: trust its summary.  */
		  funct_state y_l = funct_state_summaries->get (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* A call that cannot return lets us ignore side
		     effects but forces the looping bit.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		/* Interposable callee: only its declared flags are
		   reliable.  */
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      /* Only the ECF flags recorded on the indirect-call info
		 are known here.  */
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* A store on a path that never returns does not
		     affect the observable state.  */
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* Declared attributes may be stronger than what propagation
	     found; prefer them.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1735
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.

   Same SCC-at-a-time scheme as propagate_pure_const: for each cycle,
   decide whether any member may throw (its own body, a callee, or an
   indirect call), then mark every member nothrow or record can_throw
   in its summary.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      /* An interposable body cannot be trusted to stay
		 nothrow.  */
	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls throw unless the call site is marked
		 ECF_NOTHROW.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1847
bd5ef087 1848/* Debugging function to dump state of malloc lattice. */
1849
1850DEBUG_FUNCTION
1851static void
1852dump_malloc_lattice (FILE *dump_file, const char *s)
1853{
1854 if (!dump_file)
1855 return;
1856
1857 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1858 cgraph_node *node;
1859 FOR_EACH_FUNCTION (node)
1860 {
2c085ec2 1861 funct_state fs = funct_state_summaries->get (node);
1862 if (fs)
1863 fprintf (dump_file, "%s: %s\n", node->name (),
1864 malloc_state_names[fs->malloc_state]);
bd5ef087 1865 }
1866}
1867
1868/* Propagate malloc attribute across the callgraph. */
1869
1870static void
1871propagate_malloc (void)
1872{
1873 cgraph_node *node;
1874 FOR_EACH_FUNCTION (node)
1875 {
1876 if (DECL_IS_MALLOC (node->decl))
16f72bd0 1877 if (!funct_state_summaries->exists (node))
bd5ef087 1878 {
16f72bd0 1879 funct_state fs = funct_state_summaries->get_create (node);
1880 fs->malloc_state = STATE_MALLOC;
bd5ef087 1881 }
1882 }
1883
1884 dump_malloc_lattice (dump_file, "Initial");
1885 struct cgraph_node **order
1886 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1887 int order_pos = ipa_reverse_postorder (order);
1888 bool changed = true;
1889
1890 while (changed)
1891 {
1892 changed = false;
1893 /* Walk in postorder. */
1894 for (int i = order_pos - 1; i >= 0; --i)
1895 {
1896 cgraph_node *node = order[i];
1897 if (node->alias
1898 || !node->definition
16f72bd0 1899 || !funct_state_summaries->exists (node))
bd5ef087 1900 continue;
1901
2c085ec2 1902 funct_state l = funct_state_summaries->get (node);
bd5ef087 1903
1904 /* FIXME: add support for indirect-calls. */
1905 if (node->indirect_calls)
1906 {
1907 l->malloc_state = STATE_MALLOC_BOTTOM;
1908 continue;
1909 }
1910
1911 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1912 {
1913 l->malloc_state = STATE_MALLOC_BOTTOM;
1914 continue;
1915 }
1916
1917 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1918 continue;
1919
1920 vec<cgraph_node *> callees = vNULL;
1921 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1922 {
b53d4f56 1923 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
bd5ef087 1924 if (es && es->is_return_callee_uncaptured)
1925 callees.safe_push (cs->callee);
1926 }
1927
1928 malloc_state_e new_state = l->malloc_state;
1929 for (unsigned j = 0; j < callees.length (); j++)
1930 {
1931 cgraph_node *callee = callees[j];
16f72bd0 1932 if (!funct_state_summaries->exists (node))
bd5ef087 1933 {
1934 new_state = STATE_MALLOC_BOTTOM;
1935 break;
1936 }
16f72bd0 1937 malloc_state_e callee_state
1938 = funct_state_summaries->get_create (callee)->malloc_state;
bd5ef087 1939 if (new_state < callee_state)
1940 new_state = callee_state;
1941 }
1942 if (new_state != l->malloc_state)
1943 {
1944 changed = true;
1945 l->malloc_state = new_state;
1946 }
1947 }
1948 }
1949
1950 FOR_EACH_DEFINED_FUNCTION (node)
16f72bd0 1951 if (funct_state_summaries->exists (node))
bd5ef087 1952 {
2c085ec2 1953 funct_state l = funct_state_summaries->get (node);
bd5ef087 1954 if (!node->alias
1955 && l->malloc_state == STATE_MALLOC
1956 && !node->global.inlined_to)
1957 {
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1959 fprintf (dump_file, "Function %s found to be malloc\n",
1960 node->name ());
1961
1962 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1963 node->set_malloc_flag (true);
1964 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1965 warn_function_malloc (node->decl);
1966 }
1967 }
1968
1969 dump_malloc_lattice (dump_file, "after propagation");
1970 ipa_free_postorder_info ();
1971 free (order);
1972}
c0240443 1973
1974/* Produce the global information by preforming a transitive closure
1975 on the local information that was produced by generate_summary. */
1976
415309e2 1977unsigned int
1978pass_ipa_pure_const::
1979execute (function *)
c0240443 1980{
366970c6 1981 bool remove_p;
c0240443 1982
c0240443 1983 /* Nothrow makes more function to not lead to return and improve
1984 later analysis. */
1985 propagate_nothrow ();
bd5ef087 1986 propagate_malloc ();
366970c6 1987 remove_p = propagate_pure_const ();
c0240443 1988
16f72bd0 1989 delete funct_state_summaries;
366970c6 1990 return remove_p ? TODO_remove_functions : 0;
f7d118a9 1991}
1992
1993static bool
1994gate_pure_const (void)
1995{
d1f68cd8 1996 return flag_ipa_pure_const || in_lto_p;
f7d118a9 1997}
1998
/* Construct the IPA pure/const pass.  Local-summary generation and LTO
   streaming hooks are wired in; there is no per-function transform
   phase.  init_p presumably tracks lazy initialization of the summary
   machinery — see its uses elsewhere in this file.  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
cbe8bda8 2011
2012ipa_opt_pass_d *
2013make_pass_ipa_pure_const (gcc::context *ctxt)
2014{
2015 return new pass_ipa_pure_const (ctxt);
2016}
2017
2c06958d 2018/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 2019
2c06958d 2020static bool
2021skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 2022{
8b4ee73c 2023 /* Because we do not schedule pass_fixup_cfg over whole program after early
2024 optimizations we must not promote functions that are called by already
2025 processed functions. */
b5cebd44 2026
2027 if (function_called_by_processed_nodes_p ())
2028 {
2029 if (dump_file)
2030 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 2031 return true;
b5cebd44 2032 }
aaa36a78 2033 /* Save some work and do not analyze functions which are interposable and
2034 do not have any non-interposable aliases. */
2035 if (node->get_availability () <= AVAIL_INTERPOSABLE
2036 && !node->has_aliases_p ())
b5cebd44 2037 {
2038 if (dump_file)
8b4ee73c 2039 fprintf (dump_file,
aaa36a78 2040 "Function is interposable; not analyzing.\n");
2c06958d 2041 return true;
b5cebd44 2042 }
2c06958d 2043 return false;
2044}
2045
2046/* Simple local pass for pure const discovery reusing the analysis from
2047 ipa_pure_const. This pass is effective when executed together with
2048 other optimization passes in early optimization pass queue. */
b5cebd44 2049
7620bc82 2050namespace {
2051
/* Pass descriptor for the early, per-function pure/const discovery pass
   implemented by pass_local_pure_const below.  It shares the
   TV_IPA_PURE_CONST timevar with the IPA pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2064
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can be scheduled more than once in the
     pass queue.  */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  /* Same gate as the IPA pass: -fipa-pure-const or LTO.  */
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
2078
/* Analyze the current function locally and promote it to const, pure,
   noreturn, nothrow and/or malloc where the analysis allows, emitting
   the corresponding -Wsuggest-attribute=* warnings.  Returns
   execute_fixup_cfg's TODO flags when any flag changed, else 0.  */
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Even when the function must not be promoted (SKIP), we still analyze
     it if any suggest-attribute warning is enabled, to issue warnings.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* Freshly computed local summary; freed at the end of this function.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const; report only the looping -> non-looping upgrade.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      /* set_const_flag returns whether anything actually changed.  */
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already pure; report only the looping -> non-looping upgrade.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  /* Nothrow promotion is done regardless of SKIP.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* Malloc promotion, with an optional suggest-attribute warning.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2198
7620bc82 2199} // anon namespace
2200
cbe8bda8 2201gimple_opt_pass *
2202make_pass_local_pure_const (gcc::context *ctxt)
2203{
2204 return new pass_local_pure_const (ctxt);
2205}
64641360 2206
2207/* Emit noreturn warnings. */
2208
7620bc82 2209namespace {
2210
/* Pass descriptor for the -Wsuggest-attribute=noreturn warning pass;
   requires a CFG (PROP_cfg) so exit-block predecessors can be checked.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2223
7620bc82 2224class pass_warn_function_noreturn : public gimple_opt_pass
64641360 2225{
2226public:
2227 pass_warn_function_noreturn (gcc::context *ctxt)
2228 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2229 {}
2230
2231 /* opt_pass methods: */
31315c24 2232 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
65b0537f 2233 virtual unsigned int execute (function *fun)
2234 {
2235 if (!TREE_THIS_VOLATILE (current_function_decl)
2236 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2237 warn_function_noreturn (current_function_decl);
2238 return 0;
2239 }
64641360 2240
2241}; // class pass_warn_function_noreturn
2242
7620bc82 2243} // anon namespace
2244
64641360 2245gimple_opt_pass *
2246make_pass_warn_function_noreturn (gcc::context *ctxt)
2247{
2248 return new pass_warn_function_noreturn (ctxt);
2249}
73594827 2250
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */
2254
7620bc82 2255namespace {
2256
/* Pass descriptor for the local nothrow discovery pass implemented by
   pass_nothrow below; shares the TV_IPA_PURE_CONST timevar.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2269
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass can appear several times in the queue.  */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  /* Run whenever we are optimizing at all.  */
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
2283
2284unsigned int
2285pass_nothrow::execute (function *)
2286{
2287 struct cgraph_node *node;
2288 basic_block this_block;
2289
2290 if (TREE_NOTHROW (current_function_decl))
2291 return 0;
2292
2293 node = cgraph_node::get (current_function_decl);
2294
2295 /* We run during lowering, we can not really use availability yet. */
2296 if (cgraph_node::get (current_function_decl)->get_availability ()
2297 <= AVAIL_INTERPOSABLE)
2298 {
2299 if (dump_file)
2300 fprintf (dump_file, "Function is interposable;"
2301 " not analyzing.\n");
2302 return true;
2303 }
2304
2305 FOR_EACH_BB_FN (this_block, cfun)
2306 {
2307 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2308 !gsi_end_p (gsi);
2309 gsi_next (&gsi))
aac19106 2310 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
73594827 2311 {
2312 if (is_gimple_call (gsi_stmt (gsi)))
2313 {
2314 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2315 if (callee_t && recursive_call_p (current_function_decl,
2316 callee_t))
2317 continue;
2318 }
2319
2320 if (dump_file)
2321 {
2322 fprintf (dump_file, "Statement can throw: ");
1ffa4346 2323 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
73594827 2324 }
2325 return 0;
2326 }
2327 }
2328
2329 node->set_nothrow_flag (true);
fd499010 2330
2331 bool cfg_changed = false;
2332 if (self_recursive_p (node))
2333 FOR_EACH_BB_FN (this_block, cfun)
2334 if (gimple *g = last_stmt (this_block))
2335 if (is_gimple_call (g))
2336 {
2337 tree callee_t = gimple_call_fndecl (g);
2338 if (callee_t
2339 && recursive_call_p (current_function_decl, callee_t)
2340 && maybe_clean_eh_stmt (g)
2341 && gimple_purge_dead_eh_edges (this_block))
2342 cfg_changed = true;
2343 }
2344
73594827 2345 if (dump_file)
2346 fprintf (dump_file, "Function found to be nothrow: %s\n",
2347 current_function_name ());
fd499010 2348 return cfg_changed ? TODO_cleanup_cfg : 0;
73594827 2349}
2350
7620bc82 2351} // anon namespace
2352
73594827 2353gimple_opt_pass *
2354make_pass_nothrow (gcc::context *ctxt)
2355{
2356 return new pass_nothrow (ctxt);
2357}