]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-pure-const.c
Fix splay tree KEY leak detected in GDB test gdb.base/macscp.exp
[thirdparty/gcc.git] / gcc / ipa-pure-const.c
CommitLineData
f7d118a9 1/* Callgraph based analysis of static variables.
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
f7d118a9 3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
f7d118a9 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
f7d118a9 20
f0b5f617 21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
f7d118a9 24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
9ef16211 37#include "backend.h"
7c29e30e 38#include "target.h"
f7d118a9 39#include "tree.h"
9ef16211 40#include "gimple.h"
7c29e30e 41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
9ed99284 45#include "calls.h"
94ea8568 46#include "cfganal.h"
bc61cadb 47#include "tree-eh.h"
dcf1a1ec 48#include "gimple-iterator.h"
49#include "gimple-walk.h"
073c1fd5 50#include "tree-cfg.h"
05d9c18a 51#include "tree-ssa-loop-niter.h"
f7d118a9 52#include "langhooks.h"
f7d118a9 53#include "ipa-utils.h"
ce084dfc 54#include "gimple-pretty-print.h"
c9263b6a 55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
2c06958d 57#include "intl.h"
58#include "opts.h"
bd5ef087 59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
62#include "ipa-prop.h"
63#include "ipa-fnsummary.h"
f7d118a9 64
65/* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
67 what is found. */
68enum pure_const_state_e
69{
70 IPA_CONST,
71 IPA_PURE,
72 IPA_NEITHER
73};
74
bd5ef087 75static const char *pure_const_names[3] = {"const", "pure", "neither"};
76
77enum malloc_state_e
78{
79 STATE_MALLOC_TOP,
80 STATE_MALLOC,
81 STATE_MALLOC_BOTTOM
82};
83
84static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
fc94a528 85
/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  /* Default-construct to the most pessimistic lattice values
     (IPA_NEITHER / looping / can throw / can free / malloc bottom);
     the analysis can only improve on them.  */
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  /* Copy constructor; copies every lattice field verbatim.  */
  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Looping companion of STATE_PREVIOUSLY_KNOWN.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception visible to callers.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  /* Lattice value for the malloc-attribute discovery (see
     malloc_state_e).  */
  enum malloc_state_e malloc_state;
};
124
125typedef struct funct_state_d * funct_state;
126
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Function summary that keeps one funct_state_d per cgraph node and
   reacts to node insertion/duplication.  */
class funct_state_summary_t: public function_summary <funct_state_d *>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    function_summary <funct_state_d *> (symtab) {}

  /* Summary hooks; their definitions appear elsewhere in this file.  */
  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};
cb886925 142
16f72bd0 143static funct_state_summary_t *funct_state_summaries = NULL;
cb886925 144
415309e2 145static bool gate_pure_const (void);
146
7620bc82 147namespace {
148
/* Pass descriptor for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
161
/* The IPA pure/const pass object; gated by gate_pure_const.  */
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  /* NOTE(review): presumably records whether register_hooks already
     ran — confirm against its definition (not visible here).  */
  bool init_p;
}; // class pass_ipa_pure_const
176
7620bc82 177} // anon namespace
178
2c06958d 179/* Try to guess if function body will always be visible to compiler
180 when compiling the call and whether compiler will be able
181 to propagate the information by itself. */
182
183static bool
184function_always_visible_to_compiler_p (tree decl)
185{
60722a03 186 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
187 || DECL_COMDAT (decl));
2c06958d 188}
189
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do when the diagnostic is disabled.  */
  if (!option_enabled (option, &global_options))
    return warned_about;
  /* Skip volatile decls, and skip decls the compiler can already see
     through by itself when the function is known finite.  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily allocate the per-option set and warn at most once per decl.  */
  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
220
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  /* Decls already warned about; static so each decl is suggested at
     most once per compilation.  */
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
237
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  /* Decls already warned about; static so each decl is suggested at
     most once per compilation.  */
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
43d60d64 254
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  /* Decls already warned about; static so each decl is suggested at
     most once per compilation.  */
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}
265
266/* Emit suggestion about __attribute__((noreturn)) for DECL. */
267
64641360 268static void
43d60d64 269warn_function_noreturn (tree decl)
270{
313dfc4e 271 tree original_decl = decl;
272
431205b7 273 static hash_set<tree> *warned_about;
08c6cbd2 274 if (!lang_hooks.missing_noreturn_ok_p (decl)
275 && targetm.warn_func_return (decl))
43d60d64 276 warned_about
313dfc4e 277 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
43d60d64 278 true, warned_about, "noreturn");
279}
9631926a 280
60722a03 281void
282warn_function_cold (tree decl)
283{
284 tree original_decl = decl;
285
60722a03 286 static hash_set<tree> *warned_about;
287 warned_about
288 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
289 true, warned_about, "cold");
290}
291
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  The
   verdict is accumulated into LOCAL.  IPA selects the IPA variant of
   the analysis, where actual loads/stores are processed later via
   ipa_ref rather than here.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read; const degrades to pure.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read; const degrades to pure.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
367
f7d118a9 368
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of memory reference T (a non-decl operand) is legal in a function
   that is either pure or const; accumulate the verdict into LOCAL.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      /* Dereference through a pointer that provably cannot alias
	 global memory is harmless either way.  */
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      /* A read through an arbitrary pointer degrades const to pure.  */
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
407
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN is true when the call never returns to the
   caller, in which case its side effects can be ignored and it is
   treated as looping pure.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
448
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      /* Leaving IPA_NEITHER: its looping flag carries no information,
	 so take the incoming one verbatim.  */
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
469
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   N is the actual node called.  FROM/TO are the caller and callee
   symbols used for the interposition check below.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  /* Lattice maximum: the worse of the two states/looping flags wins.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
508
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  Return true and set *STATE and *LOOPING when
   CALLEE is such a builtin; return false otherwise.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* These builtins only touch local control/stack state, so the
	 enclosing function can still be const.  */
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      /* Prefetch is const but is conservatively marked looping.  */
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
545
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  Results accumulate into LOCAL; IPA
   selects the whole-call-graph variant of the analysis.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      /* Any operand of the call that could itself throw taints the
	 looping/can_throw flags.  */
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      /* A builtin that may free memory taints can_free.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      /* NORETURN+NOTHROW calls (or NORETURN with exceptions disabled)
	 cannot lead back to the caller.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
684
023a28e1 685/* Wrapper around check_decl for loads in local more. */
5ed0b345 686
687static bool
42acab1c 688check_load (gimple *, tree op, tree, void *data)
5ed0b345 689{
690 if (DECL_P (op))
023a28e1 691 check_decl ((funct_state)data, op, false, false);
5ed0b345 692 else
693 check_op ((funct_state)data, op, false);
694 return false;
695}
696
023a28e1 697/* Wrapper around check_decl for stores in local more. */
5ed0b345 698
699static bool
42acab1c 700check_store (gimple *, tree op, tree, void *data)
5ed0b345 701{
702 if (DECL_P (op))
023a28e1 703 check_decl ((funct_state)data, op, true, false);
704 else
705 check_op ((funct_state)data, op, true);
706 return false;
707}
708
709/* Wrapper around check_decl for loads in ipa mode. */
710
711static bool
42acab1c 712check_ipa_load (gimple *, tree op, tree, void *data)
023a28e1 713{
714 if (DECL_P (op))
715 check_decl ((funct_state)data, op, false, true);
716 else
717 check_op ((funct_state)data, op, false);
718 return false;
719}
720
721/* Wrapper around check_decl for stores in ipa mode. */
722
723static bool
42acab1c 724check_ipa_store (gimple *, tree op, tree, void *data)
023a28e1 725{
726 if (DECL_P (op))
727 check_decl ((funct_state)data, op, true, true);
5ed0b345 728 else
729 check_op ((funct_state)data, op, true);
730 return false;
731}
732
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has: loads/stores, calls, nonlocal labels and asms.  The
   verdict accumulates into LOCAL; IPA selects the IPA analysis mode.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug stmts never affect purity.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Calls handle their own EH state in check_call; everything else
     that can throw is handled here.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
828
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.
   Returns true when all immediate uses are acceptable.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	/* Conditional use: only comparisons against 0 are allowed.  */
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	/* Assignment use: must be a comparison, again against 0.  */
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;  /* Debug uses are always fine.  */
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
860
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute. Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */

/* Dump REASON (with TDF_DETAILS) and reject the current candidate.  */
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->name()), (reason));  \
  return false;  \
}

/* Worker for malloc_candidate_p: check one returned value RETVAL
   (returned by RET_STMT of FUN), recursing through nested phis.
   VISITED records SSA name versions already checked so phi cycles
   terminate.  IPA selects the whole-program variant.  */
static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  /* Already visited: accept to break recursion on phi cycles.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record on the call edge that the returned pointer is not
	 captured by the caller.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      /* Each phi argument must be 0 or the result of a (malloc-like)
	 call / nested phi with equally restricted uses.  */
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		  DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
964
965static bool
966malloc_candidate_p (function *fun, bool ipa)
967{
968 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
969 edge e;
970 edge_iterator ei;
971 cgraph_node *node = cgraph_node::get_create (fun->decl);
972
d51fd3fc 973 if (EDGE_COUNT (exit_block->preds) == 0
974 || !flag_delete_null_pointer_checks)
bd5ef087 975 return false;
976
b5f91d03 977 auto_bitmap visited;
bd5ef087 978 FOR_EACH_EDGE (e, ei, exit_block->preds)
979 {
980 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
981 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
982
983 if (!ret_stmt)
984 return false;
985
986 tree retval = gimple_return_retval (ret_stmt);
987 if (!retval)
988 DUMP_AND_RETURN("No return value.")
989
990 if (TREE_CODE (retval) != SSA_NAME
991 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
992 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
993
b5f91d03 994 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
18ea7971 995 return false;
bd5ef087 996 }
997
998 if (dump_file && (dump_flags & TDF_DETAILS))
999 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1000 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1001 return true;
bd5ef087 1002}
1003
18ea7971 1004#undef DUMP_AND_RETURN
cb886925 1005
f7d118a9 1006/* This is the main routine for finding the reference patterns for
1007 global variables within a function FN. */
1008
b5cebd44 1009static funct_state
1010analyze_function (struct cgraph_node *fn, bool ipa)
f7d118a9 1011{
02774f2d 1012 tree decl = fn->decl;
b5cebd44 1013 funct_state l;
1014 basic_block this_block;
f7d118a9 1015
b5cebd44 1016 l = XCNEW (struct funct_state_d);
1017 l->pure_const_state = IPA_CONST;
df9b545b 1018 l->state_previously_known = IPA_NEITHER;
1019 l->looping_previously_known = true;
b5cebd44 1020 l->looping = false;
1021 l->can_throw = false;
04c849b3 1022 l->can_free = false;
91bf9d9a 1023 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
02774f2d 1024 flags_from_decl_or_type (fn->decl),
415d1b9a 1025 fn->cannot_return_p ());
91bf9d9a 1026
02774f2d 1027 if (fn->thunk.thunk_p || fn->alias)
91bf9d9a 1028 {
1029 /* Thunk gets propagated through, so nothing interesting happens. */
1030 gcc_assert (ipa);
cfd85d03 1031 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1032 l->pure_const_state = IPA_NEITHER;
91bf9d9a 1033 return l;
1034 }
f7d118a9 1035
1036 if (dump_file)
1037 {
48e1416a 1038 fprintf (dump_file, "\n\n local analysis of %s\n ",
f1c8b4d7 1039 fn->name ());
f7d118a9 1040 }
48e1416a 1041
b5cebd44 1042 push_cfun (DECL_STRUCT_FUNCTION (decl));
48e1416a 1043
fc00614f 1044 FOR_EACH_BB_FN (this_block, cfun)
f7d118a9 1045 {
b5cebd44 1046 gimple_stmt_iterator gsi;
1047 struct walk_stmt_info wi;
f7d118a9 1048
9af5ce0c 1049 memset (&wi, 0, sizeof (wi));
b5cebd44 1050 for (gsi = gsi_start_bb (this_block);
1051 !gsi_end_p (gsi);
1052 gsi_next (&gsi))
f7d118a9 1053 {
b5cebd44 1054 check_stmt (&gsi, l, ipa);
04c849b3 1055 if (l->pure_const_state == IPA_NEITHER
1056 && l->looping
1057 && l->can_throw
1058 && l->can_free)
b5cebd44 1059 goto end;
f7d118a9 1060 }
1061 }
dcccac3e 1062
1063end:
b5cebd44 1064 if (l->pure_const_state != IPA_NEITHER)
1065 {
1066 /* Const functions cannot have back edges (an
1067 indication of possible infinite loop side
1068 effect. */
1069 if (mark_dfs_back_edges ())
c9263b6a 1070 {
b1887855 1071 /* Preheaders are needed for SCEV to work.
0a10fd82 1072 Simple latches and recorded exits improve chances that loop will
8ff30f9a 1073 proved to be finite in testcases such as in loop-15.c
1074 and loop-24.c */
1075 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1076 | LOOPS_HAVE_SIMPLE_LATCHES
b1887855 1077 | LOOPS_HAVE_RECORDED_EXITS);
c9263b6a 1078 if (dump_file && (dump_flags & TDF_DETAILS))
1079 flow_loops_dump (dump_file, NULL, 0);
1080 if (mark_irreducible_loops ())
1081 {
1082 if (dump_file)
1083 fprintf (dump_file, " has irreducible loops\n");
1084 l->looping = true;
1085 }
48e1416a 1086 else
c9263b6a 1087 {
c9263b6a 1088 struct loop *loop;
1089 scev_initialize ();
f21d4d00 1090 FOR_EACH_LOOP (loop, 0)
c9263b6a 1091 if (!finite_loop_p (loop))
1092 {
1093 if (dump_file)
f4d3c071 1094 fprintf (dump_file, " cannot prove finiteness of "
8ff30f9a 1095 "loop %i\n", loop->num);
c9263b6a 1096 l->looping =true;
f21d4d00 1097 break;
c9263b6a 1098 }
1099 scev_finalize ();
1100 }
1101 loop_optimizer_finalize ();
1102 }
b5cebd44 1103 }
1104
023a28e1 1105 if (dump_file && (dump_flags & TDF_DETAILS))
1106 fprintf (dump_file, " checking previously known:");
023a28e1 1107
1108 better_state (&l->pure_const_state, &l->looping,
1109 l->state_previously_known,
1110 l->looping_previously_known);
b5cebd44 1111 if (TREE_NOTHROW (decl))
1112 l->can_throw = false;
1113
bd5ef087 1114 l->malloc_state = STATE_MALLOC_BOTTOM;
1115 if (DECL_IS_MALLOC (decl))
1116 l->malloc_state = STATE_MALLOC;
1117 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1118 l->malloc_state = STATE_MALLOC_TOP;
1119 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1120 l->malloc_state = STATE_MALLOC;
1121
b5cebd44 1122 pop_cfun ();
dcccac3e 1123 if (dump_file)
1124 {
b5cebd44 1125 if (l->looping)
1126 fprintf (dump_file, "Function is locally looping.\n");
1127 if (l->can_throw)
1128 fprintf (dump_file, "Function is locally throwing.\n");
1129 if (l->pure_const_state == IPA_CONST)
1130 fprintf (dump_file, "Function is locally const.\n");
1131 if (l->pure_const_state == IPA_PURE)
1132 fprintf (dump_file, "Function is locally pure.\n");
04c849b3 1133 if (l->can_free)
1134 fprintf (dump_file, "Function can locally free.\n");
bd5ef087 1135 if (l->malloc_state == STATE_MALLOC)
1136 fprintf (dump_file, "Function is locally malloc.\n");
dcccac3e 1137 }
b5cebd44 1138 return l;
f7d118a9 1139}
1140
16f72bd0 1141void
1142funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
50828ed8 1143{
50828ed8 1144 /* There are some shared nodes, in particular the initializers on
1145 static declarations. We do not need to scan them more than once
1146 since all we would be interested in are the addressof
1147 operations. */
8b4ee73c 1148 if (opt_for_fn (node->decl, flag_ipa_pure_const))
86844d6c 1149 {
16f72bd0 1150 funct_state_d *a = analyze_function (node, true);
1151 new (state) funct_state_d (*a);
1152 free (a);
86844d6c 1153 }
1154}
1155
1156/* Called when new clone is inserted to callgraph late. */
1157
16f72bd0 1158void
1159funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
1160 funct_state_d *src_data,
1161 funct_state_d *dst_data)
86844d6c 1162{
16f72bd0 1163 new (dst_data) funct_state_d (*src_data);
86844d6c 1164}
1165
f7d118a9 1166\f
415309e2 1167void
1168pass_ipa_pure_const::
7bfefa9d 1169register_hooks (void)
f7d118a9 1170{
7bfefa9d 1171 if (init_p)
1172 return;
1173
1174 init_p = true;
f7d118a9 1175
16f72bd0 1176 funct_state_summaries = new funct_state_summary_t (symtab);
7bfefa9d 1177}
1178
1179
1180/* Analyze each function in the cgraph to see if it is locally PURE or
1181 CONST. */
1182
48e1416a 1183static void
80880eb6 1184pure_const_generate_summary (void)
7bfefa9d 1185{
1186 struct cgraph_node *node;
1187
415309e2 1188 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1189 pass->register_hooks ();
7bfefa9d 1190
48e1416a 1191 /* Process all of the functions.
f7d118a9 1192
f4d3c071 1193 We process AVAIL_INTERPOSABLE functions. We cannot use the results
ef378c27 1194 by default, but the info can be used at LTO with -fwhole-program or
0a10fd82 1195 when function got cloned and the clone is AVAILABLE. */
ef378c27 1196
7c455d87 1197 FOR_EACH_DEFINED_FUNCTION (node)
8b4ee73c 1198 if (opt_for_fn (node->decl, flag_ipa_pure_const))
16f72bd0 1199 {
1200 funct_state_d *a = analyze_function (node, true);
1201 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1202 free (a);
1203 }
cb886925 1204}
1205
7bfefa9d 1206
1207/* Serialize the ipa info for lto. */
1208
1209static void
eab36a5a 1210pure_const_write_summary (void)
7bfefa9d 1211{
1212 struct cgraph_node *node;
1213 struct lto_simple_output_block *ob
1214 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1215 unsigned int count = 0;
eab36a5a 1216 lto_symtab_encoder_iterator lsei;
1217 lto_symtab_encoder_t encoder;
7bfefa9d 1218
eab36a5a 1219 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1220
1221 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1222 lsei_next_function_in_partition (&lsei))
7bfefa9d 1223 {
eab36a5a 1224 node = lsei_cgraph_node (lsei);
16f72bd0 1225 if (node->definition && funct_state_summaries->exists (node))
7bfefa9d 1226 count++;
1227 }
48e1416a 1228
7f385784 1229 streamer_write_uhwi_stream (ob->main_stream, count);
48e1416a 1230
7bfefa9d 1231 /* Process all of the functions. */
eab36a5a 1232 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1233 lsei_next_function_in_partition (&lsei))
7bfefa9d 1234 {
eab36a5a 1235 node = lsei_cgraph_node (lsei);
16f72bd0 1236 funct_state_d *fs = funct_state_summaries->get (node);
1237 if (node->definition && fs != NULL)
7bfefa9d 1238 {
30baba90 1239 struct bitpack_d bp;
7bfefa9d 1240 int node_ref;
70225339 1241 lto_symtab_encoder_t encoder;
48e1416a 1242
70225339 1243 encoder = ob->decl_state->symtab_node_encoder;
02774f2d 1244 node_ref = lto_symtab_encoder_encode (encoder, node);
7f385784 1245 streamer_write_uhwi_stream (ob->main_stream, node_ref);
48e1416a 1246
7bfefa9d 1247 /* Note that flags will need to be read in the opposite
1248 order as we are pushing the bitflags into FLAGS. */
30baba90 1249 bp = bitpack_create (ob->main_stream);
1250 bp_pack_value (&bp, fs->pure_const_state, 2);
1251 bp_pack_value (&bp, fs->state_previously_known, 2);
1252 bp_pack_value (&bp, fs->looping_previously_known, 1);
1253 bp_pack_value (&bp, fs->looping, 1);
1254 bp_pack_value (&bp, fs->can_throw, 1);
04c849b3 1255 bp_pack_value (&bp, fs->can_free, 1);
bd5ef087 1256 bp_pack_value (&bp, fs->malloc_state, 2);
7f385784 1257 streamer_write_bitpack (&bp);
7bfefa9d 1258 }
1259 }
1260
1261 lto_destroy_simple_output_block (ob);
1262}
1263
1264
1265/* Deserialize the ipa info for lto. */
1266
48e1416a 1267static void
7bfefa9d 1268pure_const_read_summary (void)
1269{
1270 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1271 struct lto_file_decl_data *file_data;
1272 unsigned int j = 0;
1273
415309e2 1274 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1275 pass->register_hooks ();
1276
7bfefa9d 1277 while ((file_data = file_data_vec[j++]))
1278 {
1279 const char *data;
1280 size_t len;
1281 struct lto_input_block *ib
48e1416a 1282 = lto_create_simple_input_block (file_data,
1283 LTO_section_ipa_pure_const,
7bfefa9d 1284 &data, &len);
1285 if (ib)
1286 {
1287 unsigned int i;
7f385784 1288 unsigned int count = streamer_read_uhwi (ib);
7bfefa9d 1289
1290 for (i = 0; i < count; i++)
1291 {
1292 unsigned int index;
1293 struct cgraph_node *node;
30baba90 1294 struct bitpack_d bp;
7bfefa9d 1295 funct_state fs;
70225339 1296 lto_symtab_encoder_t encoder;
7bfefa9d 1297
7f385784 1298 index = streamer_read_uhwi (ib);
70225339 1299 encoder = file_data->symtab_node_encoder;
415d1b9a 1300 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1301 index));
7bfefa9d 1302
16f72bd0 1303 fs = funct_state_summaries->get_create (node);
7bfefa9d 1304 /* Note that the flags must be read in the opposite
1305 order in which they were written (the bitflags were
1306 pushed into FLAGS). */
7f385784 1307 bp = streamer_read_bitpack (ib);
7bfefa9d 1308 fs->pure_const_state
30baba90 1309 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
7bfefa9d 1310 fs->state_previously_known
30baba90 1311 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1312 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1313 fs->looping = bp_unpack_value (&bp, 1);
1314 fs->can_throw = bp_unpack_value (&bp, 1);
04c849b3 1315 fs->can_free = bp_unpack_value (&bp, 1);
bd5ef087 1316 fs->malloc_state
1317 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1318
fc94a528 1319 if (dump_file)
1320 {
02774f2d 1321 int flags = flags_from_decl_or_type (node->decl);
0e388735 1322 fprintf (dump_file, "Read info for %s ", node->dump_name ());
fc94a528 1323 if (flags & ECF_CONST)
1324 fprintf (dump_file, " const");
1325 if (flags & ECF_PURE)
1326 fprintf (dump_file, " pure");
1327 if (flags & ECF_NOTHROW)
1328 fprintf (dump_file, " nothrow");
1329 fprintf (dump_file, "\n pure const state: %s\n",
1330 pure_const_names[fs->pure_const_state]);
1331 fprintf (dump_file, " previously known state: %s\n",
8b4ee73c 1332 pure_const_names[fs->state_previously_known]);
fc94a528 1333 if (fs->looping)
1334 fprintf (dump_file," function is locally looping\n");
1335 if (fs->looping_previously_known)
1336 fprintf (dump_file," function is previously known looping\n");
1337 if (fs->can_throw)
1338 fprintf (dump_file," function is locally throwing\n");
04c849b3 1339 if (fs->can_free)
1340 fprintf (dump_file," function can locally free\n");
bd5ef087 1341 fprintf (dump_file, "\n malloc state: %s\n",
1342 malloc_state_names[fs->malloc_state]);
fc94a528 1343 }
7bfefa9d 1344 }
1345
48e1416a 1346 lto_destroy_simple_input_block (file_data,
1347 LTO_section_ipa_pure_const,
7bfefa9d 1348 ib, data, len);
1349 }
1350 }
1351}
1352
355c1f40 1353/* We only propagate across edges that can throw externally and their callee
1354 is not interposable. */
7bfefa9d 1355
17b28e52 1356static bool
355c1f40 1357ignore_edge_for_nothrow (struct cgraph_edge *e)
17b28e52 1358{
355c1f40 1359 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1360 return true;
1361
1362 enum availability avail;
8b4ee73c 1363 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1364 e->caller);
ace7bf06 1365 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1366 return true;
1367 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1368 && !e->callee->binds_to_current_def_p (e->caller);
17b28e52 1369}
1370
b2c2e188 1371/* Return true if NODE is self recursive function.
e9de52cc 1372 Indirectly recursive functions appears as non-trivial strongly
1373 connected components, so we need to care about self recursion
1374 only. */
a1e88032 1375
1376static bool
1377self_recursive_p (struct cgraph_node *node)
1378{
1379 struct cgraph_edge *e;
1380 for (e = node->callees; e; e = e->next_callee)
415d1b9a 1381 if (e->callee->function_symbol () == node)
a1e88032 1382 return true;
1383 return false;
1384}
1385
366970c6 1386/* Return true if N is cdtor that is not const or pure. In this case we may
1387 need to remove unreachable function if it is marked const/pure. */
1388
1389static bool
1390cdtor_p (cgraph_node *n, void *)
1391{
1392 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
98b8f2a5 1393 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1394 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
366970c6 1395 return false;
1396}
1397
d2c6c2b4 1398/* We only propagate across edges with non-interposable callee. */
1399
1400static bool
1401ignore_edge_for_pure_const (struct cgraph_edge *e)
1402{
1403 enum availability avail;
8b4ee73c 1404 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
d2c6c2b4 1405 return (avail <= AVAIL_INTERPOSABLE);
1406}
1407
1408
c0240443 1409/* Produce transitive closure over the callgraph and compute pure/const
1410 attributes. */
023a28e1 1411
366970c6 1412static bool
c0240443 1413propagate_pure_const (void)
cb886925 1414{
1415 struct cgraph_node *node;
1416 struct cgraph_node *w;
1417 struct cgraph_node **order =
35ee1c66 1418 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
cb886925 1419 int order_pos;
1420 int i;
1421 struct ipa_dfs_info * w_info;
366970c6 1422 bool remove_p = false;
aaa36a78 1423 bool has_cdtor;
cb886925 1424
8cdb0206 1425 order_pos = ipa_reduced_postorder (order, true,
d2c6c2b4 1426 ignore_edge_for_pure_const);
f7d118a9 1427 if (dump_file)
1428 {
415d1b9a 1429 cgraph_node::dump_cgraph (dump_file);
48669653 1430 ipa_print_order (dump_file, "reduced", order, order_pos);
f7d118a9 1431 }
1432
9d75589a 1433 /* Propagate the local information through the call graph to produce
f7d118a9 1434 the global information. All the nodes within a cycle will have
1435 the same info so we collapse cycles first. Then we can do the
1436 propagation in one pass from the leaves to the roots. */
1437 for (i = 0; i < order_pos; i++ )
1438 {
1439 enum pure_const_state_e pure_const_state = IPA_CONST;
9c2a0c05 1440 bool looping = false;
54157bf1 1441 int count = 0;
f7d118a9 1442 node = order[i];
1443
02774f2d 1444 if (node->alias)
8c1fce46 1445 continue;
1446
fc94a528 1447 if (dump_file && (dump_flags & TDF_DETAILS))
1448 fprintf (dump_file, "Starting cycle\n");
1449
f7d118a9 1450 /* Find the worst state for any node in the cycle. */
1451 w = node;
023a28e1 1452 while (w && pure_const_state != IPA_NEITHER)
f7d118a9 1453 {
b5cebd44 1454 struct cgraph_edge *e;
023a28e1 1455 struct cgraph_edge *ie;
1456 int i;
51ce5652 1457 struct ipa_ref *ref = NULL;
023a28e1 1458
16f72bd0 1459 funct_state w_l = funct_state_summaries->get_create (w);
fc94a528 1460 if (dump_file && (dump_flags & TDF_DETAILS))
0e388735 1461 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1462 w->dump_name (),
fc94a528 1463 pure_const_names[w_l->pure_const_state],
1464 w_l->looping);
f7d118a9 1465
8b4ee73c 1466 /* First merge in function body properties.
1467 We are safe to pass NULL as FROM and TO because we will take care
1468 of possible interposition when walking callees. */
023a28e1 1469 worse_state (&pure_const_state, &looping,
8b4ee73c 1470 w_l->pure_const_state, w_l->looping,
1471 NULL, NULL);
023a28e1 1472 if (pure_const_state == IPA_NEITHER)
1473 break;
1474
b5cebd44 1475 count++;
1476
023a28e1 1477 /* We consider recursive cycles as possibly infinite.
1478 This might be relaxed since infinite recursion leads to stack
1479 overflow. */
b5cebd44 1480 if (count > 1)
1481 looping = true;
48e1416a 1482
023a28e1 1483 /* Now walk the edges and merge in callee properties. */
d2c6c2b4 1484 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1485 e = e->next_callee)
f7d118a9 1486 {
b2c2e188 1487 enum availability avail;
cfd85d03 1488 struct cgraph_node *y = e->callee->
8b4ee73c 1489 function_or_virtual_thunk_symbol (&avail,
1490 e->caller);
fc94a528 1491 enum pure_const_state_e edge_state = IPA_CONST;
1492 bool edge_looping = false;
54157bf1 1493
fc94a528 1494 if (dump_file && (dump_flags & TDF_DETAILS))
1495 {
0e388735 1496 fprintf (dump_file, " Call to %s",
1497 e->callee->dump_name ());
fc94a528 1498 }
415d1b9a 1499 if (avail > AVAIL_INTERPOSABLE)
f7d118a9 1500 {
bd3c34e9 1501 funct_state y_l = funct_state_summaries->get (y);
fc94a528 1502 if (dump_file && (dump_flags & TDF_DETAILS))
1503 {
1504 fprintf (dump_file,
1505 " state:%s looping:%i\n",
1506 pure_const_names[y_l->pure_const_state],
1507 y_l->looping);
1508 }
4c170941 1509 if (y_l->pure_const_state > IPA_PURE
35ee1c66 1510 && e->cannot_lead_to_return_p ())
fc94a528 1511 {
1512 if (dump_file && (dump_flags & TDF_DETAILS))
023a28e1 1513 fprintf (dump_file,
1514 " Ignoring side effects"
1515 " -> pure, looping\n");
fc94a528 1516 edge_state = IPA_PURE;
1517 edge_looping = true;
1518 }
1519 else
1520 {
1521 edge_state = y_l->pure_const_state;
1522 edge_looping = y_l->looping;
1523 }
f7d118a9 1524 }
a53e7471 1525 else if (special_builtin_state (&edge_state, &edge_looping,
02774f2d 1526 y->decl))
7dd42908 1527 ;
ef378c27 1528 else
023a28e1 1529 state_from_flags (&edge_state, &edge_looping,
02774f2d 1530 flags_from_decl_or_type (y->decl),
35ee1c66 1531 e->cannot_lead_to_return_p ());
023a28e1 1532
1533 /* Merge the results with what we already know. */
1534 better_state (&edge_state, &edge_looping,
1535 w_l->state_previously_known,
1536 w_l->looping_previously_known);
1537 worse_state (&pure_const_state, &looping,
8b4ee73c 1538 edge_state, edge_looping, e->caller, e->callee);
023a28e1 1539 if (pure_const_state == IPA_NEITHER)
1540 break;
1541 }
ef378c27 1542
023a28e1 1543 /* Now process the indirect call. */
d2c6c2b4 1544 for (ie = w->indirect_calls;
1545 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
023a28e1 1546 {
1547 enum pure_const_state_e edge_state = IPA_CONST;
1548 bool edge_looping = false;
4c170941 1549
023a28e1 1550 if (dump_file && (dump_flags & TDF_DETAILS))
1551 fprintf (dump_file, " Indirect call");
1552 state_from_flags (&edge_state, &edge_looping,
1553 ie->indirect_info->ecf_flags,
35ee1c66 1554 ie->cannot_lead_to_return_p ());
023a28e1 1555 /* Merge the results with what we already know. */
1556 better_state (&edge_state, &edge_looping,
1557 w_l->state_previously_known,
1558 w_l->looping_previously_known);
1559 worse_state (&pure_const_state, &looping,
8b4ee73c 1560 edge_state, edge_looping, NULL, NULL);
fc94a528 1561 if (pure_const_state == IPA_NEITHER)
1562 break;
f7d118a9 1563 }
023a28e1 1564
1565 /* And finally all loads and stores. */
d2c6c2b4 1566 for (i = 0; w->iterate_reference (i, ref)
1567 && pure_const_state != IPA_NEITHER; i++)
023a28e1 1568 {
1569 enum pure_const_state_e ref_state = IPA_CONST;
1570 bool ref_looping = false;
1571 switch (ref->use)
1572 {
1573 case IPA_REF_LOAD:
1574 /* readonly reads are safe. */
51ce5652 1575 if (TREE_READONLY (ref->referred->decl))
023a28e1 1576 break;
1577 if (dump_file && (dump_flags & TDF_DETAILS))
1578 fprintf (dump_file, " nonreadonly global var read\n");
1579 ref_state = IPA_PURE;
1580 break;
1581 case IPA_REF_STORE:
51ce5652 1582 if (ref->cannot_lead_to_return ())
023a28e1 1583 break;
1584 ref_state = IPA_NEITHER;
1585 if (dump_file && (dump_flags & TDF_DETAILS))
1586 fprintf (dump_file, " global var write\n");
1587 break;
1588 case IPA_REF_ADDR:
1589 break;
5cb7c8cf 1590 default:
1591 gcc_unreachable ();
023a28e1 1592 }
1593 better_state (&ref_state, &ref_looping,
1594 w_l->state_previously_known,
1595 w_l->looping_previously_known);
1596 worse_state (&pure_const_state, &looping,
8b4ee73c 1597 ref_state, ref_looping, NULL, NULL);
023a28e1 1598 if (pure_const_state == IPA_NEITHER)
1599 break;
1600 }
02774f2d 1601 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1602 w = w_info->next_cycle;
1603 }
fc94a528 1604 if (dump_file && (dump_flags & TDF_DETAILS))
1605 fprintf (dump_file, "Result %s looping %i\n",
1606 pure_const_names [pure_const_state],
1607 looping);
f7d118a9 1608
04c849b3 1609 /* Find the worst state of can_free for any node in the cycle. */
1610 bool can_free = false;
1611 w = node;
1612 while (w && !can_free)
1613 {
1614 struct cgraph_edge *e;
bd3c34e9 1615 funct_state w_l = funct_state_summaries->get (w);
04c849b3 1616
1617 if (w_l->can_free
1618 || w->get_availability () == AVAIL_INTERPOSABLE
1619 || w->indirect_calls)
1620 can_free = true;
1621
1622 for (e = w->callees; e && !can_free; e = e->next_callee)
1623 {
1624 enum availability avail;
cfd85d03 1625 struct cgraph_node *y = e->callee->
8b4ee73c 1626 function_or_virtual_thunk_symbol (&avail,
1627 e->caller);
04c849b3 1628
1629 if (avail > AVAIL_INTERPOSABLE)
bd3c34e9 1630 can_free = funct_state_summaries->get (y)->can_free;
04c849b3 1631 else
1632 can_free = true;
1633 }
1634 w_info = (struct ipa_dfs_info *) w->aux;
1635 w = w_info->next_cycle;
1636 }
1637
f7d118a9 1638 /* Copy back the region's pure_const_state which is shared by
1639 all nodes in the region. */
1640 w = node;
1641 while (w)
1642 {
bd3c34e9 1643 funct_state w_l = funct_state_summaries->get (w);
b5cebd44 1644 enum pure_const_state_e this_state = pure_const_state;
1645 bool this_looping = looping;
f7d118a9 1646
04c849b3 1647 w_l->can_free = can_free;
1648 w->nonfreeing_fn = !can_free;
1649 if (!can_free && dump_file)
1650 fprintf (dump_file, "Function found not to call free: %s\n",
1651 w->name ());
1652
df9b545b 1653 if (w_l->state_previously_known != IPA_NEITHER
1654 && this_state > w_l->state_previously_known)
4c170941 1655 {
1656 this_state = w_l->state_previously_known;
d2c6c2b4 1657 if (this_state == IPA_NEITHER)
1658 this_looping = w_l->looping_previously_known;
4c170941 1659 }
a1e88032 1660 if (!this_looping && self_recursive_p (w))
1661 this_looping = true;
df9b545b 1662 if (!w_l->looping_previously_known)
1663 this_looping = false;
cb886925 1664
b5cebd44 1665 /* All nodes within a cycle share the same info. */
1666 w_l->pure_const_state = this_state;
1667 w_l->looping = this_looping;
1668
ce2d198d 1669 /* Inline clones share declaration with their offline copies;
1670 do not modify their declarations since the offline copy may
1671 be different. */
1672 if (!w->global.inlined_to)
1673 switch (this_state)
1674 {
1675 case IPA_CONST:
1676 if (!TREE_READONLY (w->decl))
1677 {
1678 warn_function_const (w->decl, !this_looping);
1679 if (dump_file)
1680 fprintf (dump_file, "Function found to be %sconst: %s\n",
1681 this_looping ? "looping " : "",
1682 w->name ());
1683 }
aaa36a78 1684 /* Turning constructor or destructor to non-looping const/pure
1685 enables us to possibly remove the function completely. */
1686 if (this_looping)
1687 has_cdtor = false;
1688 else
1689 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1690 NULL, true);
1691 if (w->set_const_flag (true, this_looping))
1692 {
1693 if (dump_file)
1694 fprintf (dump_file,
1695 "Declaration updated to be %sconst: %s\n",
1696 this_looping ? "looping " : "",
1697 w->name ());
1698 remove_p |= has_cdtor;
1699 }
ce2d198d 1700 break;
48e1416a 1701
ce2d198d 1702 case IPA_PURE:
1703 if (!DECL_PURE_P (w->decl))
1704 {
1705 warn_function_pure (w->decl, !this_looping);
1706 if (dump_file)
1707 fprintf (dump_file, "Function found to be %spure: %s\n",
1708 this_looping ? "looping " : "",
1709 w->name ());
1710 }
aaa36a78 1711 if (this_looping)
1712 has_cdtor = false;
1713 else
1714 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1715 NULL, true);
1716 if (w->set_pure_flag (true, this_looping))
1717 {
1718 if (dump_file)
1719 fprintf (dump_file,
1720 "Declaration updated to be %spure: %s\n",
1721 this_looping ? "looping " : "",
1722 w->name ());
1723 remove_p |= has_cdtor;
1724 }
ce2d198d 1725 break;
48e1416a 1726
ce2d198d 1727 default:
1728 break;
1729 }
02774f2d 1730 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1731 w = w_info->next_cycle;
1732 }
1733 }
1734
7771d558 1735 ipa_free_postorder_info ();
c0240443 1736 free (order);
366970c6 1737 return remove_p;
c0240443 1738}
1739
1740/* Produce transitive closure over the callgraph and compute nothrow
1741 attributes. */
1742
1743static void
1744propagate_nothrow (void)
1745{
1746 struct cgraph_node *node;
1747 struct cgraph_node *w;
1748 struct cgraph_node **order =
35ee1c66 1749 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
c0240443 1750 int order_pos;
1751 int i;
1752 struct ipa_dfs_info * w_info;
1753
8cdb0206 1754 order_pos = ipa_reduced_postorder (order, true,
355c1f40 1755 ignore_edge_for_nothrow);
17b28e52 1756 if (dump_file)
1757 {
415d1b9a 1758 cgraph_node::dump_cgraph (dump_file);
7771d558 1759 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
17b28e52 1760 }
c0240443 1761
9d75589a 1762 /* Propagate the local information through the call graph to produce
17b28e52 1763 the global information. All the nodes within a cycle will have
1764 the same info so we collapse cycles first. Then we can do the
1765 propagation in one pass from the leaves to the roots. */
1766 for (i = 0; i < order_pos; i++ )
1767 {
1768 bool can_throw = false;
1769 node = order[i];
1770
02774f2d 1771 if (node->alias)
8c1fce46 1772 continue;
1773
17b28e52 1774 /* Find the worst state for any node in the cycle. */
1775 w = node;
b2c90c54 1776 while (w && !can_throw)
17b28e52 1777 {
023a28e1 1778 struct cgraph_edge *e, *ie;
17b28e52 1779
355c1f40 1780 if (!TREE_NOTHROW (w->decl))
17b28e52 1781 {
16f72bd0 1782 funct_state w_l = funct_state_summaries->get_create (w);
17b28e52 1783
355c1f40 1784 if (w_l->can_throw
1785 || w->get_availability () == AVAIL_INTERPOSABLE)
1786 can_throw = true;
1787
1788 for (e = w->callees; e && !can_throw; e = e->next_callee)
17b28e52 1789 {
355c1f40 1790 enum availability avail;
1791
1792 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1793 continue;
1794
1795 struct cgraph_node *y = e->callee->
ace7bf06 1796 function_or_virtual_thunk_symbol (&avail,
1797 e->caller);
17b28e52 1798
07c11f2b 1799 /* We can use info about the callee only if we know it
1800 cannot be interposed.
ace7bf06 1801 When callee is compiled with non-call exceptions we also
1802 must check that the declaration is bound to current
1803 body as other semantically equivalent body may still
1804 throw. */
355c1f40 1805 if (avail <= AVAIL_INTERPOSABLE
1806 || (!TREE_NOTHROW (y->decl)
16f72bd0 1807 && (funct_state_summaries->get_create (y)->can_throw
ace7bf06 1808 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1809 && !e->callee->binds_to_current_def_p (w)))))
17b28e52 1810 can_throw = true;
1811 }
355c1f40 1812 for (ie = w->indirect_calls; ie && !can_throw;
1813 ie = ie->next_callee)
1814 if (ie->can_throw_external
1815 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1816 can_throw = true;
17b28e52 1817 }
02774f2d 1818 w_info = (struct ipa_dfs_info *) w->aux;
17b28e52 1819 w = w_info->next_cycle;
1820 }
1821
1822 /* Copy back the region's pure_const_state which is shared by
1823 all nodes in the region. */
1824 w = node;
1825 while (w)
1826 {
eb57efa5 1827 funct_state w_l = funct_state_summaries->get_create (w);
02774f2d 1828 if (!can_throw && !TREE_NOTHROW (w->decl))
b5cebd44 1829 {
ce2d198d 1830 /* Inline clones share declaration with their offline copies;
1831 do not modify their declarations since the offline copy may
1832 be different. */
1833 if (!w->global.inlined_to)
1834 {
415d1b9a 1835 w->set_nothrow_flag (true);
ce2d198d 1836 if (dump_file)
1837 fprintf (dump_file, "Function found to be nothrow: %s\n",
1838 w->name ());
1839 }
f7d118a9 1840 }
02774f2d 1841 else if (can_throw && !TREE_NOTHROW (w->decl))
ecfab407 1842 w_l->can_throw = true;
02774f2d 1843 w_info = (struct ipa_dfs_info *) w->aux;
f7d118a9 1844 w = w_info->next_cycle;
1845 }
1846 }
1847
7771d558 1848 ipa_free_postorder_info ();
f7d118a9 1849 free (order);
c0240443 1850}
1851
bd5ef087 1852/* Debugging function to dump state of malloc lattice. */
1853
1854DEBUG_FUNCTION
1855static void
1856dump_malloc_lattice (FILE *dump_file, const char *s)
1857{
1858 if (!dump_file)
1859 return;
1860
1861 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1862 cgraph_node *node;
1863 FOR_EACH_FUNCTION (node)
1864 {
2c085ec2 1865 funct_state fs = funct_state_summaries->get (node);
1866 if (fs)
1867 fprintf (dump_file, "%s: %s\n", node->name (),
1868 malloc_state_names[fs->malloc_state]);
bd5ef087 1869 }
1870}
1871
1872/* Propagate malloc attribute across the callgraph. */
1873
1874static void
1875propagate_malloc (void)
1876{
1877 cgraph_node *node;
1878 FOR_EACH_FUNCTION (node)
1879 {
1880 if (DECL_IS_MALLOC (node->decl))
16f72bd0 1881 if (!funct_state_summaries->exists (node))
bd5ef087 1882 {
16f72bd0 1883 funct_state fs = funct_state_summaries->get_create (node);
1884 fs->malloc_state = STATE_MALLOC;
bd5ef087 1885 }
1886 }
1887
1888 dump_malloc_lattice (dump_file, "Initial");
1889 struct cgraph_node **order
1890 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1891 int order_pos = ipa_reverse_postorder (order);
1892 bool changed = true;
1893
1894 while (changed)
1895 {
1896 changed = false;
1897 /* Walk in postorder. */
1898 for (int i = order_pos - 1; i >= 0; --i)
1899 {
1900 cgraph_node *node = order[i];
1901 if (node->alias
1902 || !node->definition
16f72bd0 1903 || !funct_state_summaries->exists (node))
bd5ef087 1904 continue;
1905
2c085ec2 1906 funct_state l = funct_state_summaries->get (node);
bd5ef087 1907
1908 /* FIXME: add support for indirect-calls. */
1909 if (node->indirect_calls)
1910 {
1911 l->malloc_state = STATE_MALLOC_BOTTOM;
1912 continue;
1913 }
1914
1915 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1916 {
1917 l->malloc_state = STATE_MALLOC_BOTTOM;
1918 continue;
1919 }
1920
1921 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1922 continue;
1923
1924 vec<cgraph_node *> callees = vNULL;
1925 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1926 {
b53d4f56 1927 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
bd5ef087 1928 if (es && es->is_return_callee_uncaptured)
1929 callees.safe_push (cs->callee);
1930 }
1931
1932 malloc_state_e new_state = l->malloc_state;
1933 for (unsigned j = 0; j < callees.length (); j++)
1934 {
1935 cgraph_node *callee = callees[j];
16f72bd0 1936 if (!funct_state_summaries->exists (node))
bd5ef087 1937 {
1938 new_state = STATE_MALLOC_BOTTOM;
1939 break;
1940 }
16f72bd0 1941 malloc_state_e callee_state
1942 = funct_state_summaries->get_create (callee)->malloc_state;
bd5ef087 1943 if (new_state < callee_state)
1944 new_state = callee_state;
1945 }
1946 if (new_state != l->malloc_state)
1947 {
1948 changed = true;
1949 l->malloc_state = new_state;
1950 }
1951 }
1952 }
1953
1954 FOR_EACH_DEFINED_FUNCTION (node)
16f72bd0 1955 if (funct_state_summaries->exists (node))
bd5ef087 1956 {
2c085ec2 1957 funct_state l = funct_state_summaries->get (node);
bd5ef087 1958 if (!node->alias
1959 && l->malloc_state == STATE_MALLOC
1960 && !node->global.inlined_to)
1961 {
1962 if (dump_file && (dump_flags & TDF_DETAILS))
1963 fprintf (dump_file, "Function %s found to be malloc\n",
1964 node->name ());
1965
1966 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1967 node->set_malloc_flag (true);
1968 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1969 warn_function_malloc (node->decl);
1970 }
1971 }
1972
1973 dump_malloc_lattice (dump_file, "after propagation");
1974 ipa_free_postorder_info ();
1975 free (order);
1976}
c0240443 1977
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  bool remove_p;

  /* Nothrow makes more function to not lead to return and improve
     later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  /* Pure/const propagation may discover unreachable noreturn wrappers;
     it reports whether any function became removable.  */
  remove_p = propagate_pure_const ();

  /* The per-function summaries are no longer needed after propagation.  */
  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}
1996
1997static bool
1998gate_pure_const (void)
1999{
d1f68cd8 2000 return flag_ipa_pure_const || in_lto_p;
f7d118a9 2001}
2002
415309e2 2003pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2004 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2005 pure_const_generate_summary, /* generate_summary */
2006 pure_const_write_summary, /* write_summary */
2007 pure_const_read_summary, /* read_summary */
2008 NULL, /* write_optimization_summary */
2009 NULL, /* read_optimization_summary */
2010 NULL, /* stmt_fixup */
2011 0, /* function_transform_todo_flags_start */
2012 NULL, /* function_transform */
2013 NULL), /* variable_transform */
16f72bd0 2014 init_p (false) {}
cbe8bda8 2015
/* Factory used by the pass manager to create the IPA pure/const pass.  */

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
2021
2c06958d 2022/* Return true if function should be skipped for local pure const analysis. */
b5cebd44 2023
2c06958d 2024static bool
2025skip_function_for_local_pure_const (struct cgraph_node *node)
b5cebd44 2026{
8b4ee73c 2027 /* Because we do not schedule pass_fixup_cfg over whole program after early
2028 optimizations we must not promote functions that are called by already
2029 processed functions. */
b5cebd44 2030
2031 if (function_called_by_processed_nodes_p ())
2032 {
2033 if (dump_file)
2034 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2c06958d 2035 return true;
b5cebd44 2036 }
aaa36a78 2037 /* Save some work and do not analyze functions which are interposable and
2038 do not have any non-interposable aliases. */
2039 if (node->get_availability () <= AVAIL_INTERPOSABLE
2040 && !node->has_aliases_p ())
b5cebd44 2041 {
2042 if (dump_file)
8b4ee73c 2043 fprintf (dump_file,
aaa36a78 2044 "Function is interposable; not analyzing.\n");
2c06958d 2045 return true;
b5cebd44 2046 }
2c06958d 2047 return false;
2048}
2049
2050/* Simple local pass for pure const discovery reusing the analysis from
2051 ipa_pure_const. This pass is effective when executed together with
2052 other optimization passes in early optimization pass queue. */
b5cebd44 2053
7620bc82 2054namespace {
2055
/* Pass descriptor for the per-function (local) pure/const pass.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2068
7620bc82 2069class pass_local_pure_const : public gimple_opt_pass
65b0537f 2070{
2071public:
2072 pass_local_pure_const (gcc::context *ctxt)
2073 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2074 {}
2075
2076 /* opt_pass methods: */
2077 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2078 virtual bool gate (function *) { return gate_pure_const (); }
2079 virtual unsigned int execute (function *);
2080
2081}; // class pass_local_pure_const
2082
/* Analyze the current function locally and, when safe, promote it to
   const/pure/nothrow/malloc, emitting -Wsuggest-attribute warnings
   along the way.  Returns fixup-cfg TODOs when a flag changed.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Even when we cannot promote the function (SKIP), we still run the
     analysis if attribute-suggestion warnings are requested.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* Caller owns the returned state and must free it below.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      /* Already const; we may still be able to drop the looping bit.  */
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      /* Only update the declaration when promotion is safe (!SKIP).  */
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  /* Nothrow promotion is independent of the pure/const lattice.  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  /* Release the state allocated by analyze_function.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2202
7620bc82 2203} // anon namespace
2204
cbe8bda8 2205gimple_opt_pass *
2206make_pass_local_pure_const (gcc::context *ctxt)
2207{
2208 return new pass_local_pure_const (ctxt);
2209}
64641360 2210
2211/* Emit noreturn warnings. */
2212
7620bc82 2213namespace {
2214
/* Pass descriptor for the noreturn-suggestion warning pass.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2227
7620bc82 2228class pass_warn_function_noreturn : public gimple_opt_pass
64641360 2229{
2230public:
2231 pass_warn_function_noreturn (gcc::context *ctxt)
2232 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2233 {}
2234
2235 /* opt_pass methods: */
31315c24 2236 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
65b0537f 2237 virtual unsigned int execute (function *fun)
2238 {
2239 if (!TREE_THIS_VOLATILE (current_function_decl)
2240 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2241 warn_function_noreturn (current_function_decl);
2242 return 0;
2243 }
64641360 2244
2245}; // class pass_warn_function_noreturn
2246
7620bc82 2247} // anon namespace
2248
/* Factory used by the pass manager to create the noreturn warning pass.  */

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
73594827 2254
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */
2258
7620bc82 2259namespace {
2260
/* Pass descriptor for the local nothrow-discovery pass.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2273
7620bc82 2274class pass_nothrow : public gimple_opt_pass
73594827 2275{
2276public:
2277 pass_nothrow (gcc::context *ctxt)
2278 : gimple_opt_pass (pass_data_nothrow, ctxt)
2279 {}
2280
2281 /* opt_pass methods: */
2282 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2283 virtual bool gate (function *) { return optimize; }
2284 virtual unsigned int execute (function *);
2285
2286}; // class pass_nothrow
2287
2288unsigned int
2289pass_nothrow::execute (function *)
2290{
2291 struct cgraph_node *node;
2292 basic_block this_block;
2293
2294 if (TREE_NOTHROW (current_function_decl))
2295 return 0;
2296
2297 node = cgraph_node::get (current_function_decl);
2298
f4d3c071 2299 /* We run during lowering, we cannot really use availability yet. */
73594827 2300 if (cgraph_node::get (current_function_decl)->get_availability ()
2301 <= AVAIL_INTERPOSABLE)
2302 {
2303 if (dump_file)
2304 fprintf (dump_file, "Function is interposable;"
2305 " not analyzing.\n");
2306 return true;
2307 }
2308
2309 FOR_EACH_BB_FN (this_block, cfun)
2310 {
2311 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2312 !gsi_end_p (gsi);
2313 gsi_next (&gsi))
aac19106 2314 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
73594827 2315 {
2316 if (is_gimple_call (gsi_stmt (gsi)))
2317 {
2318 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2319 if (callee_t && recursive_call_p (current_function_decl,
2320 callee_t))
2321 continue;
2322 }
2323
2324 if (dump_file)
2325 {
2326 fprintf (dump_file, "Statement can throw: ");
1ffa4346 2327 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
73594827 2328 }
2329 return 0;
2330 }
2331 }
2332
2333 node->set_nothrow_flag (true);
fd499010 2334
2335 bool cfg_changed = false;
2336 if (self_recursive_p (node))
2337 FOR_EACH_BB_FN (this_block, cfun)
2338 if (gimple *g = last_stmt (this_block))
2339 if (is_gimple_call (g))
2340 {
2341 tree callee_t = gimple_call_fndecl (g);
2342 if (callee_t
2343 && recursive_call_p (current_function_decl, callee_t)
2344 && maybe_clean_eh_stmt (g)
2345 && gimple_purge_dead_eh_edges (this_block))
2346 cfg_changed = true;
2347 }
2348
73594827 2349 if (dump_file)
2350 fprintf (dump_file, "Function found to be nothrow: %s\n",
2351 current_function_name ());
fd499010 2352 return cfg_changed ? TODO_cleanup_cfg : 0;
73594827 2353}
2354
7620bc82 2355} // anon namespace
2356
/* Factory used by the pass manager to create the nothrow pass.  */

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}