/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
c7131fb2 37#include "backend.h"
957060b5 38#include "target.h"
ea900239 39#include "tree.h"
c7131fb2 40#include "gimple.h"
957060b5
AM
41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
d8a2d370 45#include "calls.h"
60393bbc 46#include "cfganal.h"
2fb9a547 47#include "tree-eh.h"
5be5c238
AM
48#include "gimple-iterator.h"
49#include "gimple-walk.h"
442b4905 50#include "tree-cfg.h"
e28030cf 51#include "tree-ssa-loop-niter.h"
ea900239 52#include "langhooks.h"
ea900239 53#include "ipa-utils.h"
cf835838 54#include "gimple-pretty-print.h"
2de58650
JH
55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
5dc16b19
MLI
57#include "intl.h"
58#include "opts.h"
0fab169b
PK
59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
62#include "ipa-prop.h"
63#include "ipa-fnsummary.h"
ea900239
DB
64
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The numeric order matters: merges below use MIN/MAX
   on these values, so "better" states must compare smaller.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human-readable names for the lattice values, indexed by
   pure_const_state_e; used only in dump output.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};
/* Lattice for the malloc-attribute discovery; TOP is the optimistic
   starting point, BOTTOM means "definitely not malloc-like".  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Dump names for malloc_state_e, indexed by the enum value.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
d56026c2 85
812dbce5
JH
86/* Holder for the const_state. There is one of these per function
87 decl. */
36330f82 88class funct_state_d
ea900239 89{
36330f82
ML
90public:
91 funct_state_d (): pure_const_state (IPA_NEITHER),
92 state_previously_known (IPA_NEITHER), looping_previously_known (true),
93 looping (true), can_throw (true), can_free (true),
94 malloc_state (STATE_MALLOC_BOTTOM) {}
95
96 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
97 state_previously_known (s.state_previously_known),
98 looping_previously_known (s.looping_previously_known),
99 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
100 malloc_state (s.malloc_state) {}
101
812dbce5 102 /* See above. */
ea900239 103 enum pure_const_state_e pure_const_state;
33977f81 104 /* What user set here; we can be always sure about this. */
b8698a0f
L
105 enum pure_const_state_e state_previously_known;
106 bool looping_previously_known;
812dbce5
JH
107
108 /* True if the function could possibly infinite loop. There are a
109 lot of ways that this could be determined. We are pretty
110 conservative here. While it is possible to cse pure and const
111 calls, it is not legal to have dce get rid of the call if there
112 is a possibility that the call could infinite loop since this is
113 a behavioral change. */
becfd6e5 114 bool looping;
812dbce5 115
33977f81 116 bool can_throw;
8413ca87
JJ
117
118 /* If function can call free, munmap or otherwise make previously
119 non-trapping memory accesses trapping. */
120 bool can_free;
0fab169b
PK
121
122 enum malloc_state_e malloc_state;
ea900239
DB
123};
124
125typedef struct funct_state_d * funct_state;
126
812dbce5
JH
127/* The storage of the funct_state is abstracted because there is the
128 possibility that it may be desirable to move this to the cgraph
b8698a0f 129 local info. */
812dbce5 130
36330f82
ML
131class funct_state_summary_t: public function_summary <funct_state_d *>
132{
133public:
134 funct_state_summary_t (symbol_table *symtab):
135 function_summary <funct_state_d *> (symtab) {}
136
137 virtual void insert (cgraph_node *, funct_state_d *state);
138 virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
139 funct_state_d *src_data,
140 funct_state_d *dst_data);
141};
812dbce5 142
36330f82 143static funct_state_summary_t *funct_state_summaries = NULL;
812dbce5 144
3edf64aa
DM
145static bool gate_pure_const (void);
146
17795822
TS
147namespace {
148
149const pass_data pass_data_ipa_pure_const =
3edf64aa
DM
150{
151 IPA_PASS, /* type */
152 "pure-const", /* name */
153 OPTGROUP_NONE, /* optinfo_flags */
154 TV_IPA_PURE_CONST, /* tv_id */
155 0, /* properties_required */
156 0, /* properties_provided */
157 0, /* properties_destroyed */
158 0, /* todo_flags_start */
159 0, /* todo_flags_finish */
160};
161
17795822 162class pass_ipa_pure_const : public ipa_opt_pass_d
3edf64aa
DM
163{
164public:
165 pass_ipa_pure_const(gcc::context *ctxt);
166
167 /* opt_pass methods: */
168 bool gate (function *) { return gate_pure_const (); }
169 unsigned int execute (function *fun);
170
171 void register_hooks (void);
172
173private:
174 bool init_p;
3edf64aa
DM
175}; // class pass_ipa_pure_const
176
17795822
TS
177} // anon namespace
178
5dc16b19
MLI
179/* Try to guess if function body will always be visible to compiler
180 when compiling the call and whether compiler will be able
181 to propagate the information by itself. */
182
183static bool
184function_always_visible_to_compiler_p (tree decl)
185{
12b9f3ac
JH
186 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
187 || DECL_COMDAT (decl));
5dc16b19
MLI
188}
189
190/* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
191 is true if the function is known to be finite. The diagnostic is
6e2830c3 192 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
5dc16b19
MLI
193 OPTION, this function may initialize it and it is always returned
194 by the function. */
195
6e2830c3 196static hash_set<tree> *
5dc16b19 197suggest_attribute (int option, tree decl, bool known_finite,
6e2830c3 198 hash_set<tree> *warned_about,
5dc16b19
MLI
199 const char * attrib_name)
200{
46625112 201 if (!option_enabled (option, &global_options))
5dc16b19
MLI
202 return warned_about;
203 if (TREE_THIS_VOLATILE (decl)
204 || (known_finite && function_always_visible_to_compiler_p (decl)))
205 return warned_about;
206
207 if (!warned_about)
6e2830c3
TS
208 warned_about = new hash_set<tree>;
209 if (warned_about->contains (decl))
5dc16b19 210 return warned_about;
6e2830c3 211 warned_about->add (decl);
5dc16b19
MLI
212 warning_at (DECL_SOURCE_LOCATION (decl),
213 option,
214 known_finite
2f6f187a
DM
215 ? G_("function might be candidate for attribute %qs")
216 : G_("function might be candidate for attribute %qs"
217 " if it is known to return normally"), attrib_name);
5dc16b19
MLI
218 return warned_about;
219}
220
221/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
222 is true if the function is known to be finite. */
223
224static void
225warn_function_pure (tree decl, bool known_finite)
226{
a594cff3
MS
227 /* Declaring a void function pure makes no sense and is diagnosed
228 by -Wattributes because calling it would have no effect. */
229 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
230 return;
5dc16b19 231
a594cff3
MS
232 static hash_set<tree> *warned_about;
233 warned_about
5dc16b19
MLI
234 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
235 known_finite, warned_about, "pure");
236}
237
238/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
239 is true if the function is known to be finite. */
240
241static void
242warn_function_const (tree decl, bool known_finite)
243{
a594cff3
MS
244 /* Declaring a void function const makes no sense is diagnosed
245 by -Wattributes because calling it would have no effect. */
246 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
247 return;
248
6e2830c3 249 static hash_set<tree> *warned_about;
a594cff3 250 warned_about
5dc16b19
MLI
251 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
252 known_finite, warned_about, "const");
253}
7ea6b6cf 254
0fab169b
PK
255/* Emit suggestion about __attribute__((malloc)) for DECL. */
256
257static void
258warn_function_malloc (tree decl)
259{
260 static hash_set<tree> *warned_about;
261 warned_about
262 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
0fac5f2a 263 true, warned_about, "malloc");
0fab169b
PK
264}
265
266/* Emit suggestion about __attribute__((noreturn)) for DECL. */
267
c1bf2a39 268static void
7ea6b6cf
JH
269warn_function_noreturn (tree decl)
270{
a48018b5
ML
271 tree original_decl = decl;
272
6e2830c3 273 static hash_set<tree> *warned_about;
d45eae79
SL
274 if (!lang_hooks.missing_noreturn_ok_p (decl)
275 && targetm.warn_func_return (decl))
7ea6b6cf 276 warned_about
a48018b5 277 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
7ea6b6cf
JH
278 true, warned_about, "noreturn");
279}
df92c640 280
12b9f3ac
JH
281void
282warn_function_cold (tree decl)
283{
284 tree original_decl = decl;
285
12b9f3ac
JH
286 static hash_set<tree> *warned_about;
287 warned_about
288 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
289 true, warned_about, "cold");
290}
291
fa10beec 292/* Check to see if the use (or definition when CHECKING_WRITE is true)
ea900239
DB
293 variable T is legal in a function that is either pure or const. */
294
b8698a0f
L
295static inline void
296check_decl (funct_state local,
f10ea640 297 tree t, bool checking_write, bool ipa)
ea900239 298{
ea900239
DB
299 /* Do not want to do anything with volatile except mark any
300 function that uses one to be not const or pure. */
b8698a0f
L
301 if (TREE_THIS_VOLATILE (t))
302 {
ea900239 303 local->pure_const_state = IPA_NEITHER;
33977f81 304 if (dump_file)
75622c9e 305 fprintf (dump_file, " Volatile operand is not const/pure\n");
ea900239
DB
306 return;
307 }
308
309 /* Do not care about a local automatic that is not static. */
310 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
311 return;
312
33977f81
JH
313 /* If the variable has the "used" attribute, treat it as if it had a
314 been touched by the devil. */
b42186f1 315 if (DECL_PRESERVE_P (t))
33977f81
JH
316 {
317 local->pure_const_state = IPA_NEITHER;
318 if (dump_file)
319 fprintf (dump_file, " Used static/global variable is not const/pure\n");
320 return;
321 }
322
f10ea640
JH
323 /* In IPA mode we are not interested in checking actual loads and stores;
324 they will be processed at propagation time using ipa_ref. */
325 if (ipa)
326 return;
327
ea900239
DB
328 /* Since we have dealt with the locals and params cases above, if we
329 are CHECKING_WRITE, this cannot be a pure or constant
330 function. */
b8698a0f 331 if (checking_write)
eb80272a
ST
332 {
333 local->pure_const_state = IPA_NEITHER;
33977f81
JH
334 if (dump_file)
335 fprintf (dump_file, " static/global memory write is not const/pure\n");
eb80272a
ST
336 return;
337 }
ea900239
DB
338
339 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
340 {
33977f81 341 /* Readonly reads are safe. */
bd78c6d5 342 if (TREE_READONLY (t))
33977f81 343 return; /* Read of a constant, do not change the function state. */
b8698a0f 344 else
ea900239 345 {
33977f81
JH
346 if (dump_file)
347 fprintf (dump_file, " global memory read is not const\n");
ea900239
DB
348 /* Just a regular read. */
349 if (local->pure_const_state == IPA_CONST)
350 local->pure_const_state = IPA_PURE;
351 }
352 }
33977f81
JH
353 else
354 {
355 /* Compilation level statics can be read if they are readonly
356 variables. */
357 if (TREE_READONLY (t))
358 return;
359
360 if (dump_file)
361 fprintf (dump_file, " static memory read is not const\n");
362 /* Just a regular read. */
363 if (local->pure_const_state == IPA_CONST)
364 local->pure_const_state = IPA_PURE;
365 }
ea900239
DB
366}
367
ea900239 368
33977f81
JH
369/* Check to see if the use (or definition when CHECKING_WRITE is true)
370 variable T is legal in a function that is either pure or const. */
ea900239 371
b8698a0f 372static inline void
346ef3fa 373check_op (funct_state local, tree t, bool checking_write)
ea900239 374{
3c1832c3
JH
375 t = get_base_address (t);
376 if (t && TREE_THIS_VOLATILE (t))
ea900239 377 {
346ef3fa
RG
378 local->pure_const_state = IPA_NEITHER;
379 if (dump_file)
380 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
381 return;
382 }
3c1832c3 383 else if (t
70f34814 384 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
3c1832c3
JH
385 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
386 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
387 {
388 if (dump_file)
389 fprintf (dump_file, " Indirect ref to local memory is OK\n");
390 return;
391 }
346ef3fa
RG
392 else if (checking_write)
393 {
394 local->pure_const_state = IPA_NEITHER;
395 if (dump_file)
396 fprintf (dump_file, " Indirect ref write is not const/pure\n");
397 return;
398 }
399 else
400 {
401 if (dump_file)
402 fprintf (dump_file, " Indirect ref read is not const\n");
403 if (local->pure_const_state == IPA_CONST)
404 local->pure_const_state = IPA_PURE;
ea900239
DB
405 }
406}
407
f10ea640
JH
408/* compute state based on ECF FLAGS and store to STATE and LOOPING. */
409
410static void
411state_from_flags (enum pure_const_state_e *state, bool *looping,
412 int flags, bool cannot_lead_to_return)
413{
414 *looping = false;
415 if (flags & ECF_LOOPING_CONST_OR_PURE)
416 {
417 *looping = true;
418 if (dump_file && (dump_flags & TDF_DETAILS))
75622c9e 419 fprintf (dump_file, " looping\n");
f10ea640
JH
420 }
421 if (flags & ECF_CONST)
422 {
423 *state = IPA_CONST;
424 if (dump_file && (dump_flags & TDF_DETAILS))
425 fprintf (dump_file, " const\n");
426 }
427 else if (flags & ECF_PURE)
428 {
429 *state = IPA_PURE;
430 if (dump_file && (dump_flags & TDF_DETAILS))
431 fprintf (dump_file, " pure\n");
432 }
433 else if (cannot_lead_to_return)
434 {
435 *state = IPA_PURE;
436 *looping = true;
437 if (dump_file && (dump_flags & TDF_DETAILS))
438 fprintf (dump_file, " ignoring side effects->pure looping\n");
439 }
440 else
441 {
442 if (dump_file && (dump_flags & TDF_DETAILS))
df92c640 443 fprintf (dump_file, " neither\n");
f10ea640
JH
444 *state = IPA_NEITHER;
445 *looping = true;
446 }
447}
448
449/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
450 into STATE and LOOPING better of the two variants.
451 Be sure to merge looping correctly. IPA_NEITHER functions
452 have looping 0 even if they don't have to return. */
453
454static inline void
455better_state (enum pure_const_state_e *state, bool *looping,
456 enum pure_const_state_e state2, bool looping2)
457{
458 if (state2 < *state)
459 {
460 if (*state == IPA_NEITHER)
461 *looping = looping2;
462 else
463 *looping = MIN (*looping, looping2);
b0d04a5f 464 *state = state2;
f10ea640
JH
465 }
466 else if (state2 != IPA_NEITHER)
467 *looping = MIN (*looping, looping2);
468}
469
470/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
cc950f98
JH
471 into STATE and LOOPING worse of the two variants.
472 N is the actual node called. */
f10ea640
JH
473
474static inline void
475worse_state (enum pure_const_state_e *state, bool *looping,
cc950f98
JH
476 enum pure_const_state_e state2, bool looping2,
477 struct symtab_node *from,
478 struct symtab_node *to)
f10ea640 479{
cc950f98
JH
480 /* Consider function:
481
482 bool a(int *p)
483 {
484 return *p==*p;
485 }
486
487 During early optimization we will turn this into:
488
489 bool a(int *p)
490 {
491 return true;
492 }
493
494 Now if this function will be detected as CONST however when interposed it
495 may end up being just pure. We always must assume the worst scenario here.
496 */
497 if (*state == IPA_CONST && state2 == IPA_CONST
498 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
499 {
500 if (dump_file && (dump_flags & TDF_DETAILS))
501 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
502 "bind to current def.\n", to->name ());
503 state2 = IPA_PURE;
504 }
f10ea640
JH
505 *state = MAX (*state, state2);
506 *looping = MAX (*looping, looping2);
507}
508
61502ca8 509/* Recognize special cases of builtins that are by themselves not pure or const
0a42aa4e
JH
510 but function using them is. */
511static bool
9e97ff61 512special_builtin_state (enum pure_const_state_e *state, bool *looping,
0a42aa4e
JH
513 tree callee)
514{
515 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
516 switch (DECL_FUNCTION_CODE (callee))
517 {
518 case BUILT_IN_RETURN:
519 case BUILT_IN_UNREACHABLE:
9e878cf1 520 CASE_BUILT_IN_ALLOCA:
0a42aa4e
JH
521 case BUILT_IN_STACK_SAVE:
522 case BUILT_IN_STACK_RESTORE:
523 case BUILT_IN_EH_POINTER:
524 case BUILT_IN_EH_FILTER:
525 case BUILT_IN_UNWIND_RESUME:
526 case BUILT_IN_CXA_END_CLEANUP:
527 case BUILT_IN_EH_COPY_VALUES:
528 case BUILT_IN_FRAME_ADDRESS:
529 case BUILT_IN_APPLY:
530 case BUILT_IN_APPLY_ARGS:
94087e88
JJ
531 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
532 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
0a42aa4e
JH
533 *looping = false;
534 *state = IPA_CONST;
535 return true;
536 case BUILT_IN_PREFETCH:
537 *looping = true;
538 *state = IPA_CONST;
539 return true;
083e891e
MP
540 default:
541 break;
0a42aa4e
JH
542 }
543 return false;
544}
545
ea900239
DB
546/* Check the parameters of a function call to CALL_EXPR to see if
547 there are any references in the parameters that are not allowed for
548 pure or const functions. Also check to see if this is either an
549 indirect call, a call outside the compilation unit, or has special
550 attributes that may also effect the purity. The CALL_EXPR node for
551 the entire call expression. */
552
553static void
538dd0b7 554check_call (funct_state local, gcall *call, bool ipa)
ea900239 555{
726a989a 556 int flags = gimple_call_flags (call);
33977f81 557 tree callee_t = gimple_call_fndecl (call);
36bbc05d 558 bool possibly_throws = stmt_could_throw_p (cfun, call);
33977f81 559 bool possibly_throws_externally = (possibly_throws
36bbc05d 560 && stmt_can_throw_external (cfun, call));
ea900239 561
33977f81
JH
562 if (possibly_throws)
563 {
564 unsigned int i;
565 for (i = 0; i < gimple_num_ops (call); i++)
566 if (gimple_op (call, i)
567 && tree_could_throw_p (gimple_op (call, i)))
568 {
8f4f502f 569 if (possibly_throws && cfun->can_throw_non_call_exceptions)
33977f81
JH
570 {
571 if (dump_file)
572 fprintf (dump_file, " operand can throw; looping\n");
573 local->looping = true;
574 }
575 if (possibly_throws_externally)
576 {
577 if (dump_file)
578 fprintf (dump_file, " operand can throw externally\n");
579 local->can_throw = true;
580 }
581 }
582 }
b8698a0f 583
ea900239
DB
584 /* The const and pure flags are set by a variety of places in the
585 compiler (including here). If someone has already set the flags
586 for the callee, (such as for some of the builtins) we will use
b8698a0f
L
587 them, otherwise we will compute our own information.
588
ea900239
DB
589 Const and pure functions have less clobber effects than other
590 functions so we process these first. Otherwise if it is a call
591 outside the compilation unit or an indirect call we punt. This
592 leaves local calls which will be processed by following the call
b8698a0f 593 graph. */
ea900239
DB
594 if (callee_t)
595 {
0a42aa4e
JH
596 enum pure_const_state_e call_state;
597 bool call_looping;
598
8413ca87
JJ
599 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
600 && !nonfreeing_call_p (call))
601 local->can_free = true;
602
9e97ff61 603 if (special_builtin_state (&call_state, &call_looping, callee_t))
0a42aa4e
JH
604 {
605 worse_state (&local->pure_const_state, &local->looping,
cc950f98
JH
606 call_state, call_looping,
607 NULL, NULL);
0a42aa4e
JH
608 return;
609 }
ea900239
DB
610 /* When bad things happen to bad functions, they cannot be const
611 or pure. */
612 if (setjmp_call_p (callee_t))
becfd6e5 613 {
33977f81
JH
614 if (dump_file)
615 fprintf (dump_file, " setjmp is not const/pure\n");
616 local->looping = true;
becfd6e5 617 local->pure_const_state = IPA_NEITHER;
becfd6e5 618 }
ea900239
DB
619
620 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
621 switch (DECL_FUNCTION_CODE (callee_t))
622 {
623 case BUILT_IN_LONGJMP:
624 case BUILT_IN_NONLOCAL_GOTO:
33977f81
JH
625 if (dump_file)
626 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
ea900239 627 local->pure_const_state = IPA_NEITHER;
33977f81 628 local->looping = true;
ea900239
DB
629 break;
630 default:
631 break;
632 }
633 }
8413ca87
JJ
634 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
635 local->can_free = true;
ea900239 636
33977f81 637 /* When not in IPA mode, we can still handle self recursion. */
fc11f321
JH
638 if (!ipa && callee_t
639 && recursive_call_p (current_function_decl, callee_t))
5dc16b19
MLI
640 {
641 if (dump_file)
642 fprintf (dump_file, " Recursive call can loop.\n");
643 local->looping = true;
644 }
61502ca8 645 /* Either callee is unknown or we are doing local analysis.
c59f5d1b
JH
646 Look to see if there are any bits available for the callee (such as by
647 declaration or because it is builtin) and process solely on the basis of
d40790c8
JJ
648 those bits. Handle internal calls always, those calls don't have
649 corresponding cgraph edges and thus aren't processed during
650 the propagation. */
651 else if (!ipa || gimple_call_internal_p (call))
ea900239 652 {
f10ea640
JH
653 enum pure_const_state_e call_state;
654 bool call_looping;
8f4f502f 655 if (possibly_throws && cfun->can_throw_non_call_exceptions)
33977f81
JH
656 {
657 if (dump_file)
658 fprintf (dump_file, " can throw; looping\n");
659 local->looping = true;
660 }
661 if (possibly_throws_externally)
662 {
663 if (dump_file)
664 {
1d65f45c
RH
665 fprintf (dump_file, " can throw externally to lp %i\n",
666 lookup_stmt_eh_lp (call));
33977f81
JH
667 if (callee_t)
668 fprintf (dump_file, " callee:%s\n",
669 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
670 }
671 local->can_throw = true;
672 }
f10ea640
JH
673 if (dump_file && (dump_flags & TDF_DETAILS))
674 fprintf (dump_file, " checking flags for call:");
675 state_from_flags (&call_state, &call_looping, flags,
676 ((flags & (ECF_NORETURN | ECF_NOTHROW))
677 == (ECF_NORETURN | ECF_NOTHROW))
678 || (!flag_exceptions && (flags & ECF_NORETURN)));
679 worse_state (&local->pure_const_state, &local->looping,
cc950f98 680 call_state, call_looping, NULL, NULL);
ea900239 681 }
33977f81 682 /* Direct functions calls are handled by IPA propagation. */
ea900239
DB
683}
684
f10ea640 685/* Wrapper around check_decl for loads in local more. */
346ef3fa
RG
686
687static bool
355fe088 688check_load (gimple *, tree op, tree, void *data)
346ef3fa
RG
689{
690 if (DECL_P (op))
f10ea640 691 check_decl ((funct_state)data, op, false, false);
346ef3fa
RG
692 else
693 check_op ((funct_state)data, op, false);
694 return false;
695}
696
f10ea640 697/* Wrapper around check_decl for stores in local more. */
346ef3fa
RG
698
699static bool
355fe088 700check_store (gimple *, tree op, tree, void *data)
346ef3fa
RG
701{
702 if (DECL_P (op))
f10ea640
JH
703 check_decl ((funct_state)data, op, true, false);
704 else
705 check_op ((funct_state)data, op, true);
706 return false;
707}
708
709/* Wrapper around check_decl for loads in ipa mode. */
710
711static bool
355fe088 712check_ipa_load (gimple *, tree op, tree, void *data)
f10ea640
JH
713{
714 if (DECL_P (op))
715 check_decl ((funct_state)data, op, false, true);
716 else
717 check_op ((funct_state)data, op, false);
718 return false;
719}
720
721/* Wrapper around check_decl for stores in ipa mode. */
722
723static bool
355fe088 724check_ipa_store (gimple *, tree op, tree, void *data)
f10ea640
JH
725{
726 if (DECL_P (op))
727 check_decl ((funct_state)data, op, true, true);
346ef3fa
RG
728 else
729 check_op ((funct_state)data, op, true);
730 return false;
731}
732
5006671f
RG
733/* Look into pointer pointed to by GSIP and figure out what interesting side
734 effects it has. */
33977f81
JH
735static void
736check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
ea900239 737{
355fe088 738 gimple *stmt = gsi_stmt (*gsip);
ea900239 739
b5b8b0ac
AO
740 if (is_gimple_debug (stmt))
741 return;
742
38147a2a
JH
743 /* Do consider clobber as side effects before IPA, so we rather inline
744 C++ destructors and keep clobber semantics than eliminate them.
745
746 TODO: We may get smarter during early optimizations on these and let
747 functions containing only clobbers to be optimized more. This is a common
748 case of C++ destructors. */
749
750 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
751 return;
752
33977f81 753 if (dump_file)
ea900239 754 {
33977f81 755 fprintf (dump_file, " scanning: ");
ef6cb4c7 756 print_gimple_stmt (dump_file, stmt, 0);
33977f81 757 }
5006671f 758
ace018f9
RG
759 if (gimple_has_volatile_ops (stmt)
760 && !gimple_clobber_p (stmt))
4e89a3fa
RG
761 {
762 local->pure_const_state = IPA_NEITHER;
763 if (dump_file)
764 fprintf (dump_file, " Volatile stmt is not const/pure\n");
765 }
766
346ef3fa 767 /* Look for loads and stores. */
f10ea640
JH
768 walk_stmt_load_store_ops (stmt, local,
769 ipa ? check_ipa_load : check_load,
770 ipa ? check_ipa_store : check_store);
33977f81
JH
771
772 if (gimple_code (stmt) != GIMPLE_CALL
36bbc05d 773 && stmt_could_throw_p (cfun, stmt))
33977f81 774 {
8f4f502f 775 if (cfun->can_throw_non_call_exceptions)
33977f81
JH
776 {
777 if (dump_file)
425b784f 778 fprintf (dump_file, " can throw; looping\n");
33977f81
JH
779 local->looping = true;
780 }
36bbc05d 781 if (stmt_can_throw_external (cfun, stmt))
33977f81
JH
782 {
783 if (dump_file)
425b784f 784 fprintf (dump_file, " can throw externally\n");
33977f81
JH
785 local->can_throw = true;
786 }
425b784f
JH
787 else
788 if (dump_file)
789 fprintf (dump_file, " can throw\n");
726a989a 790 }
726a989a
RB
791 switch (gimple_code (stmt))
792 {
33977f81 793 case GIMPLE_CALL:
538dd0b7 794 check_call (local, as_a <gcall *> (stmt), ipa);
ea900239 795 break;
726a989a 796 case GIMPLE_LABEL:
538dd0b7 797 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
ea900239 798 /* Target of long jump. */
becfd6e5 799 {
33977f81 800 if (dump_file)
425b784f 801 fprintf (dump_file, " nonlocal label is not const/pure\n");
becfd6e5 802 local->pure_const_state = IPA_NEITHER;
becfd6e5 803 }
ea900239 804 break;
726a989a 805 case GIMPLE_ASM:
538dd0b7 806 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
33977f81 807 {
edcdea5b 808 if (dump_file)
425b784f 809 fprintf (dump_file, " memory asm clobber is not const/pure\n");
edcdea5b
NF
810 /* Abandon all hope, ye who enter here. */
811 local->pure_const_state = IPA_NEITHER;
8413ca87 812 local->can_free = true;
33977f81 813 }
538dd0b7 814 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
33977f81
JH
815 {
816 if (dump_file)
425b784f 817 fprintf (dump_file, " volatile is not const/pure\n");
33977f81
JH
818 /* Abandon all hope, ye who enter here. */
819 local->pure_const_state = IPA_NEITHER;
8413ca87
JJ
820 local->looping = true;
821 local->can_free = true;
33977f81
JH
822 }
823 return;
ea900239
DB
824 default:
825 break;
826 }
ea900239
DB
827}
828
0fab169b
PK
829/* Check that RETVAL is used only in STMT and in comparisons against 0.
830 RETVAL is return value of the function and STMT is return stmt. */
831
832static bool
833check_retval_uses (tree retval, gimple *stmt)
834{
835 imm_use_iterator use_iter;
836 gimple *use_stmt;
837
838 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
839 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
840 {
841 tree op2 = gimple_cond_rhs (cond);
842 if (!integer_zerop (op2))
843 RETURN_FROM_IMM_USE_STMT (use_iter, false);
844 }
845 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
846 {
847 enum tree_code code = gimple_assign_rhs_code (ga);
848 if (TREE_CODE_CLASS (code) != tcc_comparison)
849 RETURN_FROM_IMM_USE_STMT (use_iter, false);
850 if (!integer_zerop (gimple_assign_rhs2 (ga)))
851 RETURN_FROM_IMM_USE_STMT (use_iter, false);
852 }
853 else if (is_gimple_debug (use_stmt))
854 ;
855 else if (use_stmt != stmt)
856 RETURN_FROM_IMM_USE_STMT (use_iter, false);
857
858 return true;
859}
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or
      gassign) and return_stmt (and likewise a phi arg has immediate use
      only within comparison or the phi stmt).  */

/* Dump REASON (in detailed dumps) and bail out of the enclosing
   predicate with false.  Relies on a local NODE being in scope.  */
#define DUMP_AND_RETURN(reason)  \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->name()), (reason));  \
  return false; \
}
879
9a471714
PK
/* Worker for malloc_candidate_p.  Check whether RETVAL, returned by
   RET_STMT in function FUN, qualifies the function for the malloc
   attribute: the returned pointer must come (possibly through a phi
   merging with NULL) from a call whose result does not escape except
   through the return value.  IPA selects between the intraprocedural
   check (callee must already have the malloc attribute) and the IPA
   propagation mode (the edge is merely marked for later propagation).

   NOTE(review): this recurses into nested phi argument definitions
   without a visited set or depth bound; presumably check_retval_uses
   restricts the shapes enough to terminate -- confirm for deep or
   cyclic phi webs.  */

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* The return value may only be used by the return statement itself
     and by comparisons against 0 (NULL-checks).  */
  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      /* Without IPA propagation we must rely on the callee's existing
	 malloc attribute.  */
      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record on the call edge that the callee's return value is not
	 captured, for use by propagate_malloc.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      /* A phi is acceptable if every argument is 0 (NULL) or the result
	 of a qualifying call (or, recursively, another such phi).  */
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      /* Nested phi: recurse with the phi as the use site.  */
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa))
		  DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
961
/* Return true if FUN is a candidate for the malloc attribute: every
   return statement returns an SSA pointer that malloc_candidate_p_1
   accepts.  IPA selects IPA-propagation mode (see malloc_candidate_p_1).
   Requires -fdelete-null-pointer-checks, since the analysis treats
   NULL-comparisons of the result as the only permitted non-return use.  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  /* A function that never returns (no exit predecessors) cannot be a
     malloc candidate.  */
  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  /* Every predecessor of the exit block must end in a qualifying
     return statement.  */
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}
999
9a471714 1000#undef DUMP_AND_RETURN
812dbce5 1001
ea900239
DB
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  Allocates and returns the
   local (per-function) funct_state summary; the caller owns the
   returned memory.  IPA indicates whether we run as part of the IPA
   pass (affects thunk handling and malloc-candidate mode).  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  /* Start from the optimistic lattice top (const, not looping) and let
     check_stmt degrade the state.  */
  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  /* Seed the "previously known" state from declaration flags
     (const/pure attributes, noreturn).  */
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Scan every statement; bail out early once all properties have hit
     lattice bottom, as nothing can get worse.  */
  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect.  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, " has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      /* Use SCEV to try to prove each natural loop finite;
		 any unproven loop makes the function "looping".  */
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, " cannot prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping =true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  /* Combine what we found with what the declaration already claims.  */
  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  /* Compute the malloc lattice value: an existing attribute wins;
     otherwise classify via malloc_candidate_p (TOP = may become malloc
     after IPA propagation).  */
  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
1136
36330f82
ML
1137void
1138funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
129a37fc 1139{
129a37fc
JH
1140 /* There are some shared nodes, in particular the initializers on
1141 static declarations. We do not need to scan them more than once
1142 since all we would be interested in are the addressof
1143 operations. */
cc950f98 1144 if (opt_for_fn (node->decl, flag_ipa_pure_const))
e2c9111c 1145 {
36330f82
ML
1146 funct_state_d *a = analyze_function (node, true);
1147 new (state) funct_state_d (*a);
1148 free (a);
e2c9111c
JH
1149 }
1150}
1151
/* Called when new clone is inserted to callgraph late.  Copy the
   original node's summary into the clone's slot (placement-new copy
   construction into verifier-managed storage).  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
}
1161
ea900239 1162\f
3edf64aa
DM
1163void
1164pass_ipa_pure_const::
d7f09764 1165register_hooks (void)
ea900239 1166{
d7f09764
DN
1167 if (init_p)
1168 return;
1169
1170 init_p = true;
ea900239 1171
36330f82 1172 funct_state_summaries = new funct_state_summary_t (symtab);
d7f09764
DN
1173}
1174
1175
1176/* Analyze each function in the cgraph to see if it is locally PURE or
1177 CONST. */
1178
b8698a0f 1179static void
651df1b2 1180pure_const_generate_summary (void)
d7f09764
DN
1181{
1182 struct cgraph_node *node;
1183
3edf64aa
DM
1184 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1185 pass->register_hooks ();
d7f09764 1186
b8698a0f 1187 /* Process all of the functions.
ea900239 1188
67914693 1189 We process AVAIL_INTERPOSABLE functions. We cannot use the results
c59f5d1b 1190 by default, but the info can be used at LTO with -fwhole-program or
61502ca8 1191 when function got cloned and the clone is AVAILABLE. */
c59f5d1b 1192
65c70e6b 1193 FOR_EACH_DEFINED_FUNCTION (node)
cc950f98 1194 if (opt_for_fn (node->decl, flag_ipa_pure_const))
36330f82
ML
1195 {
1196 funct_state_d *a = analyze_function (node, true);
1197 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1198 free (a);
1199 }
812dbce5
JH
1200}
1201
d7f09764
DN
1202
/* Serialize the ipa info for lto.  Writes, for every defined function
   in the current partition that has a summary: the encoder reference of
   the node followed by a bit-packed funct_state.  The pack order here
   must stay in sync with the unpack order in pure_const_read_summary.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many to
     expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
1259
1260
/* Deserialize the ipa info for lto.  Mirror of pure_const_write_summary:
   for each input file, read the record count, then per record the node
   reference and the bit-packed funct_state.  Field unpack order must
   match the writer's pack order exactly.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* The section may be absent for this file; that is not an error.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, " previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1348
9644e52a
JH
1349/* We only propagate across edges that can throw externally and their callee
1350 is not interposable. */
d7f09764 1351
2505c5ed 1352static bool
9644e52a 1353ignore_edge_for_nothrow (struct cgraph_edge *e)
2505c5ed 1354{
9644e52a
JH
1355 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1356 return true;
1357
1358 enum availability avail;
cc950f98
JH
1359 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1360 e->caller);
a2b056a3
JH
1361 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1362 return true;
1363 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1364 && !e->callee->binds_to_current_def_p (e->caller);
2505c5ed
JH
1365}
1366
fede8efa 1367/* Return true if NODE is self recursive function.
fc11f321
JH
1368 Indirectly recursive functions appears as non-trivial strongly
1369 connected components, so we need to care about self recursion
1370 only. */
03ec7d01
JH
1371
1372static bool
1373self_recursive_p (struct cgraph_node *node)
1374{
1375 struct cgraph_edge *e;
1376 for (e = node->callees; e; e = e->next_callee)
d52f5295 1377 if (e->callee->function_symbol () == node)
03ec7d01
JH
1378 return true;
1379 return false;
1380}
1381
17e0fc92
JH
1382/* Return true if N is cdtor that is not const or pure. In this case we may
1383 need to remove unreachable function if it is marked const/pure. */
1384
1385static bool
1386cdtor_p (cgraph_node *n, void *)
1387{
1388 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
cd6d518b
JJ
1389 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1390 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
17e0fc92
JH
1391 return false;
1392}
1393
d8e3e8a5
JH
1394/* We only propagate across edges with non-interposable callee. */
1395
1396static bool
1397ignore_edge_for_pure_const (struct cgraph_edge *e)
1398{
1399 enum availability avail;
cc950f98 1400 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
d8e3e8a5
JH
1401 return (avail <= AVAIL_INTERPOSABLE);
1402}
1403
1404
15e80fc3
JH
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Returns true if some function was turned into a
   non-looping const/pure cdtor and thus may now be removable.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  /* Reduced postorder over SCCs, ignoring interposable-callee edges.  */
  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  /* Non-interposable callee: use its propagated summary.  */
		  funct_state y_l = funct_state_summaries->get (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      /* Side effects on a path that never returns can be
			 ignored for const/pure purposes.  */
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 " Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		/* Interposable callee: fall back to declaration flags.  */
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, " Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* Never degrade below what the declaration already promised.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
1735
1736/* Produce transitive closure over the callgraph and compute nothrow
1737 attributes. */
1738
1739static void
1740propagate_nothrow (void)
1741{
1742 struct cgraph_node *node;
1743 struct cgraph_node *w;
1744 struct cgraph_node **order =
3dafb85c 1745 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
15e80fc3
JH
1746 int order_pos;
1747 int i;
1748 struct ipa_dfs_info * w_info;
1749
9644e52a
JH
1750 order_pos = ipa_reduced_postorder (order, true, false,
1751 ignore_edge_for_nothrow);
2505c5ed
JH
1752 if (dump_file)
1753 {
d52f5295 1754 cgraph_node::dump_cgraph (dump_file);
af8bca3c 1755 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
2505c5ed 1756 }
15e80fc3 1757
073a8998 1758 /* Propagate the local information through the call graph to produce
2505c5ed
JH
1759 the global information. All the nodes within a cycle will have
1760 the same info so we collapse cycles first. Then we can do the
1761 propagation in one pass from the leaves to the roots. */
1762 for (i = 0; i < order_pos; i++ )
1763 {
1764 bool can_throw = false;
1765 node = order[i];
1766
67348ccc 1767 if (node->alias)
71fb4f92
JH
1768 continue;
1769
2505c5ed
JH
1770 /* Find the worst state for any node in the cycle. */
1771 w = node;
abb50207 1772 while (w && !can_throw)
2505c5ed 1773 {
f10ea640 1774 struct cgraph_edge *e, *ie;
2505c5ed 1775
9644e52a 1776 if (!TREE_NOTHROW (w->decl))
2505c5ed 1777 {
36330f82 1778 funct_state w_l = funct_state_summaries->get_create (w);
2505c5ed 1779
9644e52a
JH
1780 if (w_l->can_throw
1781 || w->get_availability () == AVAIL_INTERPOSABLE)
1782 can_throw = true;
1783
1784 for (e = w->callees; e && !can_throw; e = e->next_callee)
2505c5ed 1785 {
9644e52a
JH
1786 enum availability avail;
1787
1788 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1789 continue;
1790
1791 struct cgraph_node *y = e->callee->
a2b056a3
JH
1792 function_or_virtual_thunk_symbol (&avail,
1793 e->caller);
2505c5ed 1794
9644e52a 1795 /* We can use info about the callee only if we know it can
a2b056a3
JH
1796 not be interposed.
1797 When callee is compiled with non-call exceptions we also
1798 must check that the declaration is bound to current
1799 body as other semantically equivalent body may still
1800 throw. */
9644e52a
JH
1801 if (avail <= AVAIL_INTERPOSABLE
1802 || (!TREE_NOTHROW (y->decl)
36330f82 1803 && (funct_state_summaries->get_create (y)->can_throw
a2b056a3
JH
1804 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1805 && !e->callee->binds_to_current_def_p (w)))))
2505c5ed
JH
1806 can_throw = true;
1807 }
9644e52a
JH
1808 for (ie = w->indirect_calls; ie && !can_throw;
1809 ie = ie->next_callee)
1810 if (ie->can_throw_external
1811 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1812 can_throw = true;
2505c5ed 1813 }
67348ccc 1814 w_info = (struct ipa_dfs_info *) w->aux;
2505c5ed
JH
1815 w = w_info->next_cycle;
1816 }
1817
1818 /* Copy back the region's pure_const_state which is shared by
1819 all nodes in the region. */
1820 w = node;
1821 while (w)
1822 {
61c74e84 1823 funct_state w_l = funct_state_summaries->get_create (w);
67348ccc 1824 if (!can_throw && !TREE_NOTHROW (w->decl))
33977f81 1825 {
d7636f56
JH
1826 /* Inline clones share declaration with their offline copies;
1827 do not modify their declarations since the offline copy may
1828 be different. */
1829 if (!w->global.inlined_to)
1830 {
d52f5295 1831 w->set_nothrow_flag (true);
d7636f56
JH
1832 if (dump_file)
1833 fprintf (dump_file, "Function found to be nothrow: %s\n",
1834 w->name ());
1835 }
ea900239 1836 }
67348ccc 1837 else if (can_throw && !TREE_NOTHROW (w->decl))
b6fa5b01 1838 w_l->can_throw = true;
67348ccc 1839 w_info = (struct ipa_dfs_info *) w->aux;
ea900239
DB
1840 w = w_info->next_cycle;
1841 }
1842 }
1843
af8bca3c 1844 ipa_free_postorder_info ();
ea900239 1845 free (order);
15e80fc3
JH
1846}
1847
0fab169b
PK
1848/* Debugging function to dump state of malloc lattice. */
1849
1850DEBUG_FUNCTION
1851static void
1852dump_malloc_lattice (FILE *dump_file, const char *s)
1853{
1854 if (!dump_file)
1855 return;
1856
1857 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1858 cgraph_node *node;
1859 FOR_EACH_FUNCTION (node)
1860 {
67b3b8fe
ML
1861 funct_state fs = funct_state_summaries->get (node);
1862 if (fs)
1863 fprintf (dump_file, "%s: %s\n", node->name (),
1864 malloc_state_names[fs->malloc_state]);
0fab169b
PK
1865 }
1866}
1867
1868/* Propagate malloc attribute across the callgraph. */
1869
1870static void
1871propagate_malloc (void)
1872{
1873 cgraph_node *node;
1874 FOR_EACH_FUNCTION (node)
1875 {
1876 if (DECL_IS_MALLOC (node->decl))
36330f82 1877 if (!funct_state_summaries->exists (node))
0fab169b 1878 {
36330f82
ML
1879 funct_state fs = funct_state_summaries->get_create (node);
1880 fs->malloc_state = STATE_MALLOC;
0fab169b
PK
1881 }
1882 }
1883
1884 dump_malloc_lattice (dump_file, "Initial");
1885 struct cgraph_node **order
1886 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1887 int order_pos = ipa_reverse_postorder (order);
1888 bool changed = true;
1889
1890 while (changed)
1891 {
1892 changed = false;
1893 /* Walk in postorder. */
1894 for (int i = order_pos - 1; i >= 0; --i)
1895 {
1896 cgraph_node *node = order[i];
1897 if (node->alias
1898 || !node->definition
36330f82 1899 || !funct_state_summaries->exists (node))
0fab169b
PK
1900 continue;
1901
67b3b8fe 1902 funct_state l = funct_state_summaries->get (node);
0fab169b
PK
1903
1904 /* FIXME: add support for indirect-calls. */
1905 if (node->indirect_calls)
1906 {
1907 l->malloc_state = STATE_MALLOC_BOTTOM;
1908 continue;
1909 }
1910
1911 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1912 {
1913 l->malloc_state = STATE_MALLOC_BOTTOM;
1914 continue;
1915 }
1916
1917 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1918 continue;
1919
1920 vec<cgraph_node *> callees = vNULL;
1921 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1922 {
99353fcf 1923 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
0fab169b
PK
1924 if (es && es->is_return_callee_uncaptured)
1925 callees.safe_push (cs->callee);
1926 }
1927
1928 malloc_state_e new_state = l->malloc_state;
1929 for (unsigned j = 0; j < callees.length (); j++)
1930 {
1931 cgraph_node *callee = callees[j];
36330f82 1932 if (!funct_state_summaries->exists (node))
0fab169b
PK
1933 {
1934 new_state = STATE_MALLOC_BOTTOM;
1935 break;
1936 }
36330f82
ML
1937 malloc_state_e callee_state
1938 = funct_state_summaries->get_create (callee)->malloc_state;
0fab169b
PK
1939 if (new_state < callee_state)
1940 new_state = callee_state;
1941 }
1942 if (new_state != l->malloc_state)
1943 {
1944 changed = true;
1945 l->malloc_state = new_state;
1946 }
1947 }
1948 }
1949
1950 FOR_EACH_DEFINED_FUNCTION (node)
36330f82 1951 if (funct_state_summaries->exists (node))
0fab169b 1952 {
67b3b8fe 1953 funct_state l = funct_state_summaries->get (node);
0fab169b
PK
1954 if (!node->alias
1955 && l->malloc_state == STATE_MALLOC
1956 && !node->global.inlined_to)
1957 {
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1959 fprintf (dump_file, "Function %s found to be malloc\n",
1960 node->name ());
1961
1962 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1963 node->set_malloc_flag (true);
1964 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1965 warn_function_malloc (node->decl);
1966 }
1967 }
1968
1969 dump_malloc_lattice (dump_file, "after propagation");
1970 ipa_free_postorder_info ();
1971 free (order);
1972}
15e80fc3
JH
1973
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
1976
3edf64aa
DM
1977unsigned int
1978pass_ipa_pure_const::
1979execute (function *)
15e80fc3 1980{
17e0fc92 1981 bool remove_p;
15e80fc3 1982
15e80fc3
JH
1983 /* Nothrow makes more function to not lead to return and improve
1984 later analysis. */
1985 propagate_nothrow ();
0fab169b 1986 propagate_malloc ();
17e0fc92 1987 remove_p = propagate_pure_const ();
15e80fc3 1988
36330f82 1989 delete funct_state_summaries;
17e0fc92 1990 return remove_p ? TODO_remove_functions : 0;
ea900239
DB
1991}
1992
1993static bool
1994gate_pure_const (void)
1995{
2bf86c84 1996 return flag_ipa_pure_const || in_lto_p;
ea900239
DB
1997}
1998
3edf64aa
DM
/* Construct the IPA pure/const pass.  The three summary callbacks hook
   the local analysis into (LTO) summary generation and streaming; the
   optimization-summary, stmt-fixup and per-function transform hooks
   are unused.  init_p starts false -- presumably a lazy-initialization
   flag consumed elsewhere in this file; confirm against its uses.  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
27a4cd48
DM
2011
/* Factory used by the pass manager; ownership of the new pass object
   transfers to the caller.  */

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
2017
5dc16b19 2018/* Return true if function should be skipped for local pure const analysis. */
33977f81 2019
5dc16b19
MLI
2020static bool
2021skip_function_for_local_pure_const (struct cgraph_node *node)
33977f81 2022{
cc950f98
JH
2023 /* Because we do not schedule pass_fixup_cfg over whole program after early
2024 optimizations we must not promote functions that are called by already
2025 processed functions. */
33977f81
JH
2026
2027 if (function_called_by_processed_nodes_p ())
2028 {
2029 if (dump_file)
2030 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
5dc16b19 2031 return true;
33977f81 2032 }
6b715bf6
JH
2033 /* Save some work and do not analyze functions which are interposable and
2034 do not have any non-interposable aliases. */
2035 if (node->get_availability () <= AVAIL_INTERPOSABLE
2036 && !node->has_aliases_p ())
33977f81
JH
2037 {
2038 if (dump_file)
cc950f98 2039 fprintf (dump_file,
6b715bf6 2040 "Function is interposable; not analyzing.\n");
5dc16b19 2041 return true;
33977f81 2042 }
5dc16b19
MLI
2043 return false;
2044}
2045
2046/* Simple local pass for pure const discovery reusing the analysis from
2047 ipa_pure_const. This pass is effective when executed together with
2048 other optimization passes in early optimization pass queue. */
33977f81 2049
17795822
TS
2050namespace {
2051
/* Pass descriptor for the local (single-function) pure/const pass;
   shares the TV_IPA_PURE_CONST timevar with the IPA variant.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2064
/* Gimple pass wrapper for the local pure/const discovery; gated the
   same way as the IPA pass and cloneable so it can appear at several
   points in the pass queue.  */

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
2078
/* Run the local pure/const analysis on FUN and immediately commit
   whatever it proves: noreturn, const/pure (possibly looping),
   nothrow and malloc.  Returns the TODO flags of execute_fixup_cfg
   when any declaration bit changed, 0 otherwise.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* Even when we may not promote the function, the analysis is still
     worth running if an attribute-suggestion warning is enabled.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  /* false selects the non-IPA/local analysis mode -- presumably; the
     state returned is heap-allocated and freed below.  TODO confirm
     against analyze_function earlier in this file.  */
  l = analyze_function (node, false);

  /* Do NORETURN discovery: no predecessor of the exit block means no
     path through the function ever returns.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      /* Warn/dump when the decl does not yet carry the bit; either
	 way, set_const_flag decides whether anything really changed.  */
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      /* Same commit logic as IPA_CONST, for the pure bit.  */
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  /* Nothrow discovery is committed unconditionally (not guarded by
     SKIP).  */
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  /* Likewise for malloc discovery.  */
  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2198
17795822
TS
2199} // anon namespace
2200
27a4cd48
DM
/* Factory used by the pass manager; ownership of the new pass object
   transfers to the caller.  */

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
c1bf2a39
AM
2206
2207/* Emit noreturn warnings. */
2208
17795822
TS
2209namespace {
2210
/* Pass descriptor for the noreturn-warning pass; requires a CFG so
   the exit block's predecessors can be inspected.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2223
/* Pass that emits -Wsuggest-attribute=noreturn warnings: a function
   whose exit block has no predecessors never returns, so suggest the
   attribute unless the decl is already marked volatile (noreturn).  */

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
2242
17795822
TS
2243} // anon namespace
2244
c1bf2a39
AM
/* Factory used by the pass manager; ownership of the new pass object
   transfers to the caller.  */

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
38147a2a
JH
2250
/* Simple local pass for nothrow discovery, reusing infrastructure from
   ipa_pure_const.  It marks a function TREE_NOTHROW when no statement
   in its body can throw externally.  This pass is effective when
   executed together with other optimization passes in the early
   optimization pass queue.  */
2254
17795822
TS
2255namespace {
2256
/* Pass descriptor for the local nothrow-discovery pass; shares the
   TV_IPA_PURE_CONST timevar with the pure/const machinery.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2269
/* Gimple pass that sets TREE_NOTHROW on functions proven unable to
   throw externally.  Gated simply on optimization being enabled;
   cloneable so it may run at multiple points in the queue.  */

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
2283
2284unsigned int
2285pass_nothrow::execute (function *)
2286{
2287 struct cgraph_node *node;
2288 basic_block this_block;
2289
2290 if (TREE_NOTHROW (current_function_decl))
2291 return 0;
2292
2293 node = cgraph_node::get (current_function_decl);
2294
67914693 2295 /* We run during lowering, we cannot really use availability yet. */
38147a2a
JH
2296 if (cgraph_node::get (current_function_decl)->get_availability ()
2297 <= AVAIL_INTERPOSABLE)
2298 {
2299 if (dump_file)
2300 fprintf (dump_file, "Function is interposable;"
2301 " not analyzing.\n");
2302 return true;
2303 }
2304
2305 FOR_EACH_BB_FN (this_block, cfun)
2306 {
2307 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2308 !gsi_end_p (gsi);
2309 gsi_next (&gsi))
36bbc05d 2310 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
38147a2a
JH
2311 {
2312 if (is_gimple_call (gsi_stmt (gsi)))
2313 {
2314 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2315 if (callee_t && recursive_call_p (current_function_decl,
2316 callee_t))
2317 continue;
2318 }
2319
2320 if (dump_file)
2321 {
2322 fprintf (dump_file, "Statement can throw: ");
ef6cb4c7 2323 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
38147a2a
JH
2324 }
2325 return 0;
2326 }
2327 }
2328
2329 node->set_nothrow_flag (true);
d93c452f
JJ
2330
2331 bool cfg_changed = false;
2332 if (self_recursive_p (node))
2333 FOR_EACH_BB_FN (this_block, cfun)
2334 if (gimple *g = last_stmt (this_block))
2335 if (is_gimple_call (g))
2336 {
2337 tree callee_t = gimple_call_fndecl (g);
2338 if (callee_t
2339 && recursive_call_p (current_function_decl, callee_t)
2340 && maybe_clean_eh_stmt (g)
2341 && gimple_purge_dead_eh_edges (this_block))
2342 cfg_changed = true;
2343 }
2344
38147a2a
JH
2345 if (dump_file)
2346 fprintf (dump_file, "Function found to be nothrow: %s\n",
2347 current_function_name ());
d93c452f 2348 return cfg_changed ? TODO_cleanup_cfg : 0;
38147a2a
JH
2349}
2350
17795822
TS
2351} // anon namespace
2352
38147a2a
JH
/* Factory used by the pass manager; ownership of the new pass object
   transfers to the caller.  */

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}