]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa-pure-const.cc
aarch64: Avoid using mismatched ZERO ZA sizes
[thirdparty/gcc.git] / gcc / ipa-pure-const.cc
CommitLineData
ea900239 1/* Callgraph based analysis of static variables.
a945c346 2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
ea900239
DB
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9dcd6f09 9Software Foundation; either version 3, or (at your option) any later
ea900239
DB
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
ea900239 20
fa10beec
RW
21/* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
ea900239
DB
24
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
29
30/* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
c7131fb2 37#include "backend.h"
957060b5 38#include "target.h"
ea900239 39#include "tree.h"
c7131fb2 40#include "gimple.h"
957060b5
AM
41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
d8a2d370 45#include "calls.h"
60393bbc 46#include "cfganal.h"
2fb9a547 47#include "tree-eh.h"
5be5c238
AM
48#include "gimple-iterator.h"
49#include "gimple-walk.h"
442b4905 50#include "tree-cfg.h"
e28030cf 51#include "tree-ssa-loop-niter.h"
ea900239 52#include "langhooks.h"
ea900239 53#include "ipa-utils.h"
cf835838 54#include "gimple-pretty-print.h"
2de58650
JH
55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
5dc16b19
MLI
57#include "intl.h"
58#include "opts.h"
0fab169b
PK
59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
c8742849
MJ
62#include "sreal.h"
63#include "ipa-cp.h"
0fab169b
PK
64#include "ipa-prop.h"
65#include "ipa-fnsummary.h"
67f3791f 66#include "symtab-thunks.h"
6471396d 67#include "dbgcnt.h"
ea900239
DB
68
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Dump names, indexed by pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Lattice used when discovering the malloc attribute.  NOTE(review):
   top/bottom semantics inferred from the names -- confirm against the
   propagation code outside this chunk.  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Dump names, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
d56026c2 89
812dbce5
JH
/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  /* Default-construct to the most pessimistic values; analysis can
     only improve on them.  */
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  /* Copy constructor; member-wise copy of the whole state.  */
  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  /* Whether the previously known state allowed looping.  */
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  /* True if the function may throw an exception.  */
  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  /* Position in the malloc-attribute lattice.  */
  enum malloc_state_e malloc_state;
};

/* Shorthand pointer type used throughout this file.  */
typedef class funct_state_d * funct_state;
ea900239 130
812dbce5
JH
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Per-function summary holding one funct_state_d per cgraph node.  */
class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  /* Summary hooks invoked when nodes are inserted or duplicated;
     definitions are outside this chunk.  */
  void insert (cgraph_node *, funct_state_d *state) final override;
  void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
		  funct_state_d *src_data,
		  funct_state_d *dst_data) final override;
};

/* Singleton summary instance.  NOTE(review): allocation site is not
   visible in this chunk -- presumably created when the pass runs.  */
static funct_state_summary_t *funct_state_summaries = NULL;
812dbce5 149
3edf64aa
DM
/* Forward declaration of the pass gate predicate.  */
static bool gate_pure_const (void);

namespace {

/* Descriptor for the IPA pure/const pass.  */
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *fun) final override;

  /* Register cgraph hooks; definition is outside this chunk.  */
  void register_hooks (void);

private:
  /* NOTE(review): appears to track whether hooks were registered --
     confirm against the out-of-view definition.  */
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
183
5dc16b19
MLI
184/* Try to guess if function body will always be visible to compiler
185 when compiling the call and whether compiler will be able
186 to propagate the information by itself. */
187
188static bool
189function_always_visible_to_compiler_p (tree decl)
190{
12b9f3ac
JH
191 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
192 || DECL_COMDAT (decl));
5dc16b19
MLI
193}
194
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  /* Nothing to do unless the warning is enabled for this language.  */
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  /* Skip volatile decls, and finite functions whose body is always
     visible -- the compiler can derive the property itself there.  */
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  /* Lazily create the set and warn at most once per decl.  */
  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
225
226/* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
227 is true if the function is known to be finite. */
228
229static void
230warn_function_pure (tree decl, bool known_finite)
231{
a594cff3
MS
232 /* Declaring a void function pure makes no sense and is diagnosed
233 by -Wattributes because calling it would have no effect. */
234 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
235 return;
5dc16b19 236
a594cff3
MS
237 static hash_set<tree> *warned_about;
238 warned_about
5dc16b19
MLI
239 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
240 known_finite, warned_about, "pure");
241}
242
243/* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
244 is true if the function is known to be finite. */
245
246static void
247warn_function_const (tree decl, bool known_finite)
248{
a594cff3
MS
249 /* Declaring a void function const makes no sense is diagnosed
250 by -Wattributes because calling it would have no effect. */
251 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
252 return;
253
6e2830c3 254 static hash_set<tree> *warned_about;
a594cff3 255 warned_about
5dc16b19
MLI
256 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
257 known_finite, warned_about, "const");
258}
7ea6b6cf 259
0fab169b
PK
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  /* Pass known_finite = true: the suggestion does not depend on
     whether the function returns.  */
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}
270
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  /* Only suggest when the front end does not excuse a missing
     noreturn and the target wants return warnings for DECL.  */
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}
df92c640 285
12b9f3ac
JH
/* Emit suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}
296
53ba8d66
JH
/* Emit suggestion about __attribute__((returns_nonnull)) for DECL.  */

void
warn_function_returns_nonnull (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
			 true, warned_about, "returns_nonnull");
}
305
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   LOCAL is the state being accumulated for the current function; it is
   only ever degraded here.  IPA is true when running the IPA variant,
   in which case actual loads/stores are left to propagation time.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had a
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, " global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
381
ea900239 382
33977f81
JH
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.
   Unlike check_decl this handles memory references: T is reduced to
   its base address first.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      /* References to function-local or readonly memory never affect
	 the pure/const state.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local or readonly "
		 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      /* A plain read degrades const to pure.  */
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
419
f10ea640
JH
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.
   CANNOT_LEAD_TO_RETURN being true allows treating side effects as
   irrelevant, yielding a looping-pure state.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      /* Side effects are irrelevant when control never returns.  */
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
460
461/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
462 into STATE and LOOPING better of the two variants.
463 Be sure to merge looping correctly. IPA_NEITHER functions
464 have looping 0 even if they don't have to return. */
465
466static inline void
467better_state (enum pure_const_state_e *state, bool *looping,
468 enum pure_const_state_e state2, bool looping2)
469{
470 if (state2 < *state)
471 {
472 if (*state == IPA_NEITHER)
473 *looping = looping2;
474 else
475 *looping = MIN (*looping, looping2);
b0d04a5f 476 *state = state2;
f10ea640
JH
477 }
478 else if (state2 != IPA_NEITHER)
479 *looping = MIN (*looping, looping2);
480}
481
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   N is the actual node called.  FROM/TO are the caller and callee
   symtab nodes used for the interposition check below.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  /* The lattices are ordered so that MAX picks the worse variant.  */
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
520
/* Recognize special cases of builtins that are by themselves not const
   but function using them is.  Sets *LOOPING and returns true when
   CALLEE is such a builtin; leaves *LOOPING untouched otherwise.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      case BUILT_IN_PREFETCH:
	/* Prefetch is safe but treated as possibly looping.  */
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}
555
ea900239
DB
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also effect the purity.  The CALL_EXPR node for
   the entire call expression.  LOCAL accumulates the state for the
   current function; IPA selects the interprocedural variant.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  /* Operands that may themselves throw degrade the state even before
     looking at the callee.  */
  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      /* Any normal builtin that is not known to be non-freeing may
	 release memory.  */
      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      /* A noreturn+nothrow call (or noreturn with exceptions disabled)
	 cannot lead to a return, letting state_from_flags ignore its
	 side effects.  */
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
694
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}
706
/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
718
/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}
730
/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
742
5006671f
RG
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  LOCAL accumulates the state; IPA selects the
   interprocedural variant of the load/store walkers.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  /* Debug stmts never affect code generation.  */
  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  /* Non-call stmts that may throw; calls handle throwing in
     check_call.  */
  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
840
0fab169b
PK
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	/* Conditions are acceptable only when comparing against 0.  */
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	/* Likewise for assignments: only comparisons against 0.  */
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  return false;
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}
872
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */

/* Dump REASON for rejecting the candidate (with -details) and return
   false from the enclosing function.  Requires a variable `node' to be
   in scope at the use site.  */
#define DUMP_AND_RETURN(reason) \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS)) \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason)); \
  return false; \
}
891
/* Helper for malloc_candidate_p: check one return value RETVAL of FUN
   defined at RET_STMT.  VISITED guards against revisiting the same
   SSA name when phis are nested.  IPA selects the interprocedural
   variant.  */
static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  /* Already examined this SSA name; treat as OK to avoid cycles.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      /* Record on the call edge that the return value escapes only
	 through this return.  */
      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      /* Every phi argument must be 0 or the result of a (malloc-like)
	 call or a nested phi satisfying the same conditions.  */
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is a not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
976
977static bool
978malloc_candidate_p (function *fun, bool ipa)
979{
980 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
981 edge e;
982 edge_iterator ei;
983 cgraph_node *node = cgraph_node::get_create (fun->decl);
984
a8c80d03
PK
985 if (EDGE_COUNT (exit_block->preds) == 0
986 || !flag_delete_null_pointer_checks)
0fab169b
PK
987 return false;
988
4882e5ba 989 auto_bitmap visited;
0fab169b
PK
990 FOR_EACH_EDGE (e, ei, exit_block->preds)
991 {
992 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
993 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
994
995 if (!ret_stmt)
996 return false;
997
998 tree retval = gimple_return_retval (ret_stmt);
999 if (!retval)
1000 DUMP_AND_RETURN("No return value.")
1001
1002 if (TREE_CODE (retval) != SSA_NAME
1003 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
1004 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
1005
4882e5ba 1006 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
9a471714 1007 return false;
0fab169b
PK
1008 }
1009
1010 if (dump_file && (dump_flags & TDF_DETAILS))
1011 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1012 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1013 return true;
0fab169b
PK
1014}
1015
9a471714 1016#undef DUMP_AND_RETURN
812dbce5 1017
992644c3
JH
1018/* Return true if function is known to be finite. */
1019
1020bool
1021finite_function_p ()
1022{
1023 /* Const functions cannot have back edges (an
1024 indication of possible infinite loop side
1025 effect. */
1026 bool finite = true;
1027 if (mark_dfs_back_edges ())
1028 {
1029 /* Preheaders are needed for SCEV to work.
1030 Simple latches and recorded exits improve chances that loop will
1031 proved to be finite in testcases such as in loop-15.c
1032 and loop-24.c */
1033 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1034 | LOOPS_HAVE_SIMPLE_LATCHES
1035 | LOOPS_HAVE_RECORDED_EXITS);
1036 if (dump_file && (dump_flags & TDF_DETAILS))
1037 flow_loops_dump (dump_file, NULL, 0);
1038 if (mark_irreducible_loops ())
1039 {
1040 if (dump_file)
1041 fprintf (dump_file, " has irreducible loops\n");
1042 finite = false;
1043 }
1044 else
1045 {
1046 scev_initialize ();
1047 for (auto loop : loops_list (cfun, 0))
1048 if (!finite_loop_p (loop))
1049 {
1050 if (dump_file)
1051 fprintf (dump_file, " cannot prove finiteness of "
1052 "loop %i\n", loop->num);
1053 finite =false;
1054 break;
1055 }
1056 scev_finalize ();
1057 }
1058 loop_optimizer_finalize ();
1059 }
1060 return finite;
1061}
1062
ea900239
DB
1063/* This is the main routine for finding the reference patterns for
1064 global variables within a function FN. */
1065
33977f81
JH
1066static funct_state
1067analyze_function (struct cgraph_node *fn, bool ipa)
ea900239 1068{
67348ccc 1069 tree decl = fn->decl;
33977f81
JH
1070 funct_state l;
1071 basic_block this_block;
ea900239 1072
99b1c316 1073 l = XCNEW (class funct_state_d);
33977f81 1074 l->pure_const_state = IPA_CONST;
f87c9042
JH
1075 l->state_previously_known = IPA_NEITHER;
1076 l->looping_previously_known = true;
33977f81
JH
1077 l->looping = false;
1078 l->can_throw = false;
8413ca87 1079 l->can_free = false;
c47d0034 1080 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
67348ccc 1081 flags_from_decl_or_type (fn->decl),
d52f5295 1082 fn->cannot_return_p ());
c47d0034 1083
67f3791f 1084 if (fn->thunk || fn->alias)
c47d0034
JH
1085 {
1086 /* Thunk gets propagated through, so nothing interesting happens. */
1087 gcc_assert (ipa);
67f3791f 1088 if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
6cbde2e3 1089 l->pure_const_state = IPA_NEITHER;
c47d0034
JH
1090 return l;
1091 }
ea900239
DB
1092
1093 if (dump_file)
1094 {
b8698a0f 1095 fprintf (dump_file, "\n\n local analysis of %s\n ",
3629ff8a 1096 fn->dump_name ());
ea900239 1097 }
b8698a0f 1098
33977f81 1099 push_cfun (DECL_STRUCT_FUNCTION (decl));
b8698a0f 1100
11cd3bed 1101 FOR_EACH_BB_FN (this_block, cfun)
ea900239 1102 {
33977f81
JH
1103 gimple_stmt_iterator gsi;
1104 struct walk_stmt_info wi;
ea900239 1105
c3284718 1106 memset (&wi, 0, sizeof (wi));
33977f81
JH
1107 for (gsi = gsi_start_bb (this_block);
1108 !gsi_end_p (gsi);
1109 gsi_next (&gsi))
ea900239 1110 {
e93809f6
JH
1111 /* NULL memory accesses terminates BB. These accesses are known
1112 to trip undefined behaviour. gimple-ssa-isolate-paths turns them
1113 to volatile accesses and adds builtin_trap call which would
1114 confuse us otherwise. */
1115 if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
1116 null_pointer_node))
1117 {
1118 if (dump_file)
1119 fprintf (dump_file, " NULL memory access; terminating BB%s\n",
1120 flag_non_call_exceptions ? "; looping" : "");
1121 if (flag_non_call_exceptions)
1122 {
1123 l->looping = true;
1124 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
1125 {
1126 if (dump_file)
1127 fprintf (dump_file, " can throw externally\n");
1128 l->can_throw = true;
1129 }
1130 }
1131 break;
1132 }
33977f81 1133 check_stmt (&gsi, l, ipa);
8413ca87
JJ
1134 if (l->pure_const_state == IPA_NEITHER
1135 && l->looping
1136 && l->can_throw
1137 && l->can_free)
33977f81 1138 goto end;
ea900239
DB
1139 }
1140 }
13335ae6
AP
1141
1142end:
992644c3
JH
1143 if (l->pure_const_state != IPA_NEITHER
1144 && !l->looping
1145 && !finite_function_p ())
1146 l->looping = true;
33977f81 1147
f10ea640
JH
1148 if (dump_file && (dump_flags & TDF_DETAILS))
1149 fprintf (dump_file, " checking previously known:");
f10ea640
JH
1150
1151 better_state (&l->pure_const_state, &l->looping,
1152 l->state_previously_known,
1153 l->looping_previously_known);
33977f81
JH
1154 if (TREE_NOTHROW (decl))
1155 l->can_throw = false;
1156
0fab169b
PK
1157 l->malloc_state = STATE_MALLOC_BOTTOM;
1158 if (DECL_IS_MALLOC (decl))
1159 l->malloc_state = STATE_MALLOC;
1160 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1161 l->malloc_state = STATE_MALLOC_TOP;
1162 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1163 l->malloc_state = STATE_MALLOC;
1164
33977f81 1165 pop_cfun ();
13335ae6
AP
1166 if (dump_file)
1167 {
33977f81
JH
1168 if (l->looping)
1169 fprintf (dump_file, "Function is locally looping.\n");
1170 if (l->can_throw)
1171 fprintf (dump_file, "Function is locally throwing.\n");
1172 if (l->pure_const_state == IPA_CONST)
1173 fprintf (dump_file, "Function is locally const.\n");
1174 if (l->pure_const_state == IPA_PURE)
1175 fprintf (dump_file, "Function is locally pure.\n");
8413ca87
JJ
1176 if (l->can_free)
1177 fprintf (dump_file, "Function can locally free.\n");
0fab169b
PK
1178 if (l->malloc_state == STATE_MALLOC)
1179 fprintf (dump_file, "Function is locally malloc.\n");
13335ae6 1180 }
33977f81 1181 return l;
ea900239
DB
1182}
1183
36330f82
ML
1184void
1185funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
129a37fc 1186{
129a37fc
JH
1187 /* There are some shared nodes, in particular the initializers on
1188 static declarations. We do not need to scan them more than once
1189 since all we would be interested in are the addressof
1190 operations. */
cc950f98 1191 if (opt_for_fn (node->decl, flag_ipa_pure_const))
e2c9111c 1192 {
36330f82
ML
1193 funct_state_d *a = analyze_function (node, true);
1194 new (state) funct_state_d (*a);
1195 free (a);
e2c9111c 1196 }
d5d9706f
JH
1197 else
1198 /* Do not keep stale summaries. */
1199 funct_state_summaries->remove (node);
e2c9111c
JH
1200}
1201
1202/* Called when new clone is inserted to callgraph late. */
1203
36330f82 1204void
33351ff9 1205funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
36330f82
ML
1206 funct_state_d *src_data,
1207 funct_state_d *dst_data)
e2c9111c 1208{
36330f82 1209 new (dst_data) funct_state_d (*src_data);
33351ff9
ML
1210 if (dst_data->malloc_state == STATE_MALLOC
1211 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1212 dst_data->malloc_state = STATE_MALLOC_BOTTOM;
e2c9111c
JH
1213}
1214
ea900239 1215\f
3edf64aa
DM
1216void
1217pass_ipa_pure_const::
d7f09764 1218register_hooks (void)
ea900239 1219{
d7f09764
DN
1220 if (init_p)
1221 return;
1222
1223 init_p = true;
ea900239 1224
36330f82 1225 funct_state_summaries = new funct_state_summary_t (symtab);
d7f09764
DN
1226}
1227
1228
1229/* Analyze each function in the cgraph to see if it is locally PURE or
1230 CONST. */
1231
b8698a0f 1232static void
651df1b2 1233pure_const_generate_summary (void)
d7f09764
DN
1234{
1235 struct cgraph_node *node;
1236
3edf64aa
DM
1237 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1238 pass->register_hooks ();
d7f09764 1239
b8698a0f 1240 /* Process all of the functions.
ea900239 1241
67914693 1242 We process AVAIL_INTERPOSABLE functions. We cannot use the results
c59f5d1b 1243 by default, but the info can be used at LTO with -fwhole-program or
61502ca8 1244 when function got cloned and the clone is AVAILABLE. */
c59f5d1b 1245
65c70e6b 1246 FOR_EACH_DEFINED_FUNCTION (node)
cc950f98 1247 if (opt_for_fn (node->decl, flag_ipa_pure_const))
36330f82
ML
1248 {
1249 funct_state_d *a = analyze_function (node, true);
1250 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1251 free (a);
1252 }
812dbce5
JH
1253}
1254
d7f09764
DN
1255
1256/* Serialize the ipa info for lto. */
1257
1258static void
f27c1867 1259pure_const_write_summary (void)
d7f09764
DN
1260{
1261 struct cgraph_node *node;
1262 struct lto_simple_output_block *ob
1263 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1264 unsigned int count = 0;
f27c1867
JH
1265 lto_symtab_encoder_iterator lsei;
1266 lto_symtab_encoder_t encoder;
d7f09764 1267
f27c1867
JH
1268 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1269
1270 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1271 lsei_next_function_in_partition (&lsei))
d7f09764 1272 {
f27c1867 1273 node = lsei_cgraph_node (lsei);
36330f82 1274 if (node->definition && funct_state_summaries->exists (node))
d7f09764
DN
1275 count++;
1276 }
b8698a0f 1277
412288f1 1278 streamer_write_uhwi_stream (ob->main_stream, count);
b8698a0f 1279
d7f09764 1280 /* Process all of the functions. */
f27c1867
JH
1281 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1282 lsei_next_function_in_partition (&lsei))
d7f09764 1283 {
f27c1867 1284 node = lsei_cgraph_node (lsei);
36330f82
ML
1285 funct_state_d *fs = funct_state_summaries->get (node);
1286 if (node->definition && fs != NULL)
d7f09764 1287 {
2465dcc2 1288 struct bitpack_d bp;
d7f09764 1289 int node_ref;
7380e6ef 1290 lto_symtab_encoder_t encoder;
b8698a0f 1291
7380e6ef 1292 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 1293 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 1294 streamer_write_uhwi_stream (ob->main_stream, node_ref);
b8698a0f 1295
d7f09764
DN
1296 /* Note that flags will need to be read in the opposite
1297 order as we are pushing the bitflags into FLAGS. */
2465dcc2
RG
1298 bp = bitpack_create (ob->main_stream);
1299 bp_pack_value (&bp, fs->pure_const_state, 2);
1300 bp_pack_value (&bp, fs->state_previously_known, 2);
1301 bp_pack_value (&bp, fs->looping_previously_known, 1);
1302 bp_pack_value (&bp, fs->looping, 1);
1303 bp_pack_value (&bp, fs->can_throw, 1);
8413ca87 1304 bp_pack_value (&bp, fs->can_free, 1);
0fab169b 1305 bp_pack_value (&bp, fs->malloc_state, 2);
412288f1 1306 streamer_write_bitpack (&bp);
d7f09764
DN
1307 }
1308 }
1309
1310 lto_destroy_simple_output_block (ob);
1311}
1312
1313
1314/* Deserialize the ipa info for lto. */
1315
b8698a0f 1316static void
d7f09764
DN
1317pure_const_read_summary (void)
1318{
1319 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1320 struct lto_file_decl_data *file_data;
1321 unsigned int j = 0;
1322
3edf64aa
DM
1323 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1324 pass->register_hooks ();
1325
d7f09764
DN
1326 while ((file_data = file_data_vec[j++]))
1327 {
1328 const char *data;
1329 size_t len;
99b1c316 1330 class lto_input_block *ib
b8698a0f
L
1331 = lto_create_simple_input_block (file_data,
1332 LTO_section_ipa_pure_const,
d7f09764
DN
1333 &data, &len);
1334 if (ib)
1335 {
1336 unsigned int i;
412288f1 1337 unsigned int count = streamer_read_uhwi (ib);
d7f09764
DN
1338
1339 for (i = 0; i < count; i++)
1340 {
1341 unsigned int index;
1342 struct cgraph_node *node;
2465dcc2 1343 struct bitpack_d bp;
d7f09764 1344 funct_state fs;
7380e6ef 1345 lto_symtab_encoder_t encoder;
d7f09764 1346
412288f1 1347 index = streamer_read_uhwi (ib);
7380e6ef 1348 encoder = file_data->symtab_node_encoder;
d52f5295
ML
1349 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1350 index));
d7f09764 1351
36330f82 1352 fs = funct_state_summaries->get_create (node);
d7f09764
DN
1353 /* Note that the flags must be read in the opposite
1354 order in which they were written (the bitflags were
1355 pushed into FLAGS). */
412288f1 1356 bp = streamer_read_bitpack (ib);
d7f09764 1357 fs->pure_const_state
2465dcc2 1358 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
d7f09764 1359 fs->state_previously_known
2465dcc2
RG
1360 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1361 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1362 fs->looping = bp_unpack_value (&bp, 1);
1363 fs->can_throw = bp_unpack_value (&bp, 1);
8413ca87 1364 fs->can_free = bp_unpack_value (&bp, 1);
0fab169b
PK
1365 fs->malloc_state
1366 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1367
d56026c2
JH
1368 if (dump_file)
1369 {
67348ccc 1370 int flags = flags_from_decl_or_type (node->decl);
464d0118 1371 fprintf (dump_file, "Read info for %s ", node->dump_name ());
d56026c2
JH
1372 if (flags & ECF_CONST)
1373 fprintf (dump_file, " const");
1374 if (flags & ECF_PURE)
1375 fprintf (dump_file, " pure");
1376 if (flags & ECF_NOTHROW)
1377 fprintf (dump_file, " nothrow");
1378 fprintf (dump_file, "\n pure const state: %s\n",
1379 pure_const_names[fs->pure_const_state]);
1380 fprintf (dump_file, " previously known state: %s\n",
cc950f98 1381 pure_const_names[fs->state_previously_known]);
d56026c2
JH
1382 if (fs->looping)
1383 fprintf (dump_file," function is locally looping\n");
1384 if (fs->looping_previously_known)
1385 fprintf (dump_file," function is previously known looping\n");
1386 if (fs->can_throw)
1387 fprintf (dump_file," function is locally throwing\n");
8413ca87
JJ
1388 if (fs->can_free)
1389 fprintf (dump_file," function can locally free\n");
0fab169b
PK
1390 fprintf (dump_file, "\n malloc state: %s\n",
1391 malloc_state_names[fs->malloc_state]);
d56026c2 1392 }
d7f09764
DN
1393 }
1394
b8698a0f
L
1395 lto_destroy_simple_input_block (file_data,
1396 LTO_section_ipa_pure_const,
d7f09764
DN
1397 ib, data, len);
1398 }
1399 }
1400}
1401
9644e52a
JH
1402/* We only propagate across edges that can throw externally and their callee
1403 is not interposable. */
d7f09764 1404
2505c5ed 1405static bool
9644e52a 1406ignore_edge_for_nothrow (struct cgraph_edge *e)
2505c5ed 1407{
9644e52a
JH
1408 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1409 return true;
1410
1411 enum availability avail;
97e59627
ML
1412 cgraph_node *ultimate_target
1413 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1414 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
a2b056a3 1415 return true;
97e59627
ML
1416 return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1417 && !e->callee->binds_to_current_def_p (e->caller))
1418 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1419 || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
2505c5ed
JH
1420}
1421
fede8efa 1422/* Return true if NODE is self recursive function.
fc11f321
JH
1423 Indirectly recursive functions appears as non-trivial strongly
1424 connected components, so we need to care about self recursion
1425 only. */
03ec7d01
JH
1426
1427static bool
1428self_recursive_p (struct cgraph_node *node)
1429{
1430 struct cgraph_edge *e;
1431 for (e = node->callees; e; e = e->next_callee)
d52f5295 1432 if (e->callee->function_symbol () == node)
03ec7d01
JH
1433 return true;
1434 return false;
1435}
1436
17e0fc92
JH
1437/* Return true if N is cdtor that is not const or pure. In this case we may
1438 need to remove unreachable function if it is marked const/pure. */
1439
1440static bool
1441cdtor_p (cgraph_node *n, void *)
1442{
1443 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
cd6d518b
JJ
1444 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1445 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
17e0fc92
JH
1446 return false;
1447}
1448
97e59627
ML
1449/* Skip edges from and to nodes without ipa_pure_const enabled.
1450 Ignore not available symbols. */
d8e3e8a5
JH
1451
1452static bool
1453ignore_edge_for_pure_const (struct cgraph_edge *e)
1454{
1455 enum availability avail;
97e59627
ML
1456 cgraph_node *ultimate_target
1457 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
d8e3e8a5 1458
97e59627
ML
1459 return (avail <= AVAIL_INTERPOSABLE
1460 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1461 || !opt_for_fn (ultimate_target->decl,
1462 flag_ipa_pure_const));
1463}
d8e3e8a5 1464
494bdadf
JH
1465/* Return true if function should be skipped for local pure const analysis. */
1466
1467static bool
1468skip_function_for_local_pure_const (struct cgraph_node *node)
1469{
1470 /* Because we do not schedule pass_fixup_cfg over whole program after early
1471 optimizations we must not promote functions that are called by already
1472 processed functions. */
1473
1474 if (function_called_by_processed_nodes_p ())
1475 {
1476 if (dump_file)
1477 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1478 return true;
1479 }
1480 /* Save some work and do not analyze functions which are interposable and
1481 do not have any non-interposable aliases. */
1482 if (node->get_availability () <= AVAIL_INTERPOSABLE
1483 && !flag_lto
1484 && !node->has_aliases_p ())
1485 {
1486 if (dump_file)
1487 fprintf (dump_file,
1488 "Function is interposable; not analyzing.\n");
1489 return true;
1490 }
1491 return false;
1492}
1493
1494/* Make function const and output warning. If LOCAL is true,
1495 return true if anything changed. Otherwise return true if
1496 we may have introduced removale ctors. */
1497
1498bool
1499ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1500{
1501 bool cdtor = false;
1502
1503 if (TREE_READONLY (node->decl)
1504 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1505 return false;
1506 warn_function_const (node->decl, !looping);
1507 if (local && skip_function_for_local_pure_const (node))
1508 return false;
1509 if (dump_file)
1510 fprintf (dump_file, "Function found to be %sconst: %s\n",
1511 looping ? "looping " : "",
1512 node->dump_name ());
6471396d 1513 if (!local && !looping)
494bdadf 1514 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
6471396d
JH
1515 if (!dbg_cnt (ipa_attr))
1516 return false;
494bdadf
JH
1517 if (node->set_const_flag (true, looping))
1518 {
1519 if (dump_file)
1520 fprintf (dump_file,
1521 "Declaration updated to be %sconst: %s\n",
1522 looping ? "looping " : "",
1523 node->dump_name ());
1524 if (local)
1525 return true;
1526 return cdtor;
1527 }
1528 return false;
1529}
1530
1531/* Make function const and output warning. If LOCAL is true,
1532 return true if anything changed. Otherwise return true if
1533 we may have introduced removale ctors. */
1534
1535bool
1536ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1537{
1538 bool cdtor = false;
1539
45e09c2e
RB
1540 if (TREE_READONLY (node->decl)
1541 || (DECL_PURE_P (node->decl)
1542 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
494bdadf
JH
1543 return false;
1544 warn_function_pure (node->decl, !looping);
1545 if (local && skip_function_for_local_pure_const (node))
1546 return false;
1547 if (dump_file)
1548 fprintf (dump_file, "Function found to be %spure: %s\n",
1549 looping ? "looping " : "",
1550 node->dump_name ());
6471396d 1551 if (!local && !looping)
494bdadf 1552 cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
6471396d
JH
1553 if (!dbg_cnt (ipa_attr))
1554 return false;
494bdadf
JH
1555 if (node->set_pure_flag (true, looping))
1556 {
1557 if (dump_file)
1558 fprintf (dump_file,
1559 "Declaration updated to be %spure: %s\n",
1560 looping ? "looping " : "",
1561 node->dump_name ());
1562 if (local)
1563 return true;
1564 return cdtor;
1565 }
1566 return false;
1567}
1568
15e80fc3
JH
1569/* Produce transitive closure over the callgraph and compute pure/const
1570 attributes. */
f10ea640 1571
17e0fc92 1572static bool
15e80fc3 1573propagate_pure_const (void)
812dbce5
JH
1574{
1575 struct cgraph_node *node;
1576 struct cgraph_node *w;
1577 struct cgraph_node **order =
3dafb85c 1578 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
812dbce5
JH
1579 int order_pos;
1580 int i;
1581 struct ipa_dfs_info * w_info;
17e0fc92 1582 bool remove_p = false;
812dbce5 1583
45272fd2 1584 order_pos = ipa_reduced_postorder (order, true,
d8e3e8a5 1585 ignore_edge_for_pure_const);
ea900239
DB
1586 if (dump_file)
1587 {
d52f5295 1588 cgraph_node::dump_cgraph (dump_file);
40a7fe1e 1589 ipa_print_order (dump_file, "reduced", order, order_pos);
ea900239
DB
1590 }
1591
073a8998 1592 /* Propagate the local information through the call graph to produce
ea900239
DB
1593 the global information. All the nodes within a cycle will have
1594 the same info so we collapse cycles first. Then we can do the
1595 propagation in one pass from the leaves to the roots. */
1596 for (i = 0; i < order_pos; i++ )
1597 {
1598 enum pure_const_state_e pure_const_state = IPA_CONST;
becfd6e5 1599 bool looping = false;
17541d72 1600 int count = 0;
ea900239
DB
1601 node = order[i];
1602
67348ccc 1603 if (node->alias)
71fb4f92
JH
1604 continue;
1605
d56026c2
JH
1606 if (dump_file && (dump_flags & TDF_DETAILS))
1607 fprintf (dump_file, "Starting cycle\n");
1608
ea900239
DB
1609 /* Find the worst state for any node in the cycle. */
1610 w = node;
f10ea640 1611 while (w && pure_const_state != IPA_NEITHER)
ea900239 1612 {
33977f81 1613 struct cgraph_edge *e;
f10ea640
JH
1614 struct cgraph_edge *ie;
1615 int i;
d122681a 1616 struct ipa_ref *ref = NULL;
f10ea640 1617
36330f82 1618 funct_state w_l = funct_state_summaries->get_create (w);
d56026c2 1619 if (dump_file && (dump_flags & TDF_DETAILS))
464d0118
ML
1620 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1621 w->dump_name (),
d56026c2
JH
1622 pure_const_names[w_l->pure_const_state],
1623 w_l->looping);
ea900239 1624
cc950f98
JH
1625 /* First merge in function body properties.
1626 We are safe to pass NULL as FROM and TO because we will take care
1627 of possible interposition when walking callees. */
f10ea640 1628 worse_state (&pure_const_state, &looping,
cc950f98
JH
1629 w_l->pure_const_state, w_l->looping,
1630 NULL, NULL);
f10ea640
JH
1631 if (pure_const_state == IPA_NEITHER)
1632 break;
1633
33977f81
JH
1634 count++;
1635
f10ea640
JH
1636 /* We consider recursive cycles as possibly infinite.
1637 This might be relaxed since infinite recursion leads to stack
1638 overflow. */
33977f81
JH
1639 if (count > 1)
1640 looping = true;
b8698a0f 1641
f10ea640 1642 /* Now walk the edges and merge in callee properties. */
d8e3e8a5
JH
1643 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1644 e = e->next_callee)
ea900239 1645 {
fede8efa 1646 enum availability avail;
6cbde2e3 1647 struct cgraph_node *y = e->callee->
cc950f98
JH
1648 function_or_virtual_thunk_symbol (&avail,
1649 e->caller);
d56026c2
JH
1650 enum pure_const_state_e edge_state = IPA_CONST;
1651 bool edge_looping = false;
17541d72 1652
6e30c481
JH
1653 if (e->recursive_p ())
1654 looping = true;
1655
d56026c2
JH
1656 if (dump_file && (dump_flags & TDF_DETAILS))
1657 {
464d0118
ML
1658 fprintf (dump_file, " Call to %s",
1659 e->callee->dump_name ());
d56026c2 1660 }
d52f5295 1661 if (avail > AVAIL_INTERPOSABLE)
ea900239 1662 {
b3f2b048
ML
1663 funct_state y_l = funct_state_summaries->get_create (y);
1664
d56026c2
JH
1665 if (dump_file && (dump_flags & TDF_DETAILS))
1666 {
1667 fprintf (dump_file,
1668 " state:%s looping:%i\n",
1669 pure_const_names[y_l->pure_const_state],
1670 y_l->looping);
1671 }
da8c7675 1672 if (y_l->pure_const_state > IPA_PURE
3dafb85c 1673 && e->cannot_lead_to_return_p ())
d56026c2
JH
1674 {
1675 if (dump_file && (dump_flags & TDF_DETAILS))
f10ea640
JH
1676 fprintf (dump_file,
1677 " Ignoring side effects"
1678 " -> pure, looping\n");
d56026c2
JH
1679 edge_state = IPA_PURE;
1680 edge_looping = true;
1681 }
1682 else
1683 {
1684 edge_state = y_l->pure_const_state;
1685 edge_looping = y_l->looping;
1686 }
ea900239 1687 }
992644c3
JH
1688 else if (builtin_safe_for_const_function_p (&edge_looping,
1689 y->decl))
1690 edge_state = IPA_CONST;
c59f5d1b 1691 else
f10ea640 1692 state_from_flags (&edge_state, &edge_looping,
67348ccc 1693 flags_from_decl_or_type (y->decl),
3dafb85c 1694 e->cannot_lead_to_return_p ());
f10ea640
JH
1695
1696 /* Merge the results with what we already know. */
1697 better_state (&edge_state, &edge_looping,
1698 w_l->state_previously_known,
1699 w_l->looping_previously_known);
1700 worse_state (&pure_const_state, &looping,
cc950f98 1701 edge_state, edge_looping, e->caller, e->callee);
f10ea640
JH
1702 if (pure_const_state == IPA_NEITHER)
1703 break;
1704 }
c59f5d1b 1705
f10ea640 1706 /* Now process the indirect call. */
d8e3e8a5
JH
1707 for (ie = w->indirect_calls;
1708 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
f10ea640
JH
1709 {
1710 enum pure_const_state_e edge_state = IPA_CONST;
1711 bool edge_looping = false;
da8c7675 1712
f10ea640
JH
1713 if (dump_file && (dump_flags & TDF_DETAILS))
1714 fprintf (dump_file, " Indirect call");
1715 state_from_flags (&edge_state, &edge_looping,
1716 ie->indirect_info->ecf_flags,
3dafb85c 1717 ie->cannot_lead_to_return_p ());
f10ea640
JH
1718 /* Merge the results with what we already know. */
1719 better_state (&edge_state, &edge_looping,
1720 w_l->state_previously_known,
1721 w_l->looping_previously_known);
1722 worse_state (&pure_const_state, &looping,
cc950f98 1723 edge_state, edge_looping, NULL, NULL);
d56026c2
JH
1724 if (pure_const_state == IPA_NEITHER)
1725 break;
ea900239 1726 }
f10ea640
JH
1727
1728 /* And finally all loads and stores. */
d8e3e8a5
JH
1729 for (i = 0; w->iterate_reference (i, ref)
1730 && pure_const_state != IPA_NEITHER; i++)
f10ea640
JH
1731 {
1732 enum pure_const_state_e ref_state = IPA_CONST;
1733 bool ref_looping = false;
1734 switch (ref->use)
1735 {
1736 case IPA_REF_LOAD:
1737 /* readonly reads are safe. */
d122681a 1738 if (TREE_READONLY (ref->referred->decl))
f10ea640
JH
1739 break;
1740 if (dump_file && (dump_flags & TDF_DETAILS))
1741 fprintf (dump_file, " nonreadonly global var read\n");
1742 ref_state = IPA_PURE;
1743 break;
1744 case IPA_REF_STORE:
d122681a 1745 if (ref->cannot_lead_to_return ())
f10ea640
JH
1746 break;
1747 ref_state = IPA_NEITHER;
1748 if (dump_file && (dump_flags & TDF_DETAILS))
1749 fprintf (dump_file, " global var write\n");
1750 break;
1751 case IPA_REF_ADDR:
1752 break;
7d2268ea
MJ
1753 default:
1754 gcc_unreachable ();
f10ea640
JH
1755 }
1756 better_state (&ref_state, &ref_looping,
1757 w_l->state_previously_known,
1758 w_l->looping_previously_known);
1759 worse_state (&pure_const_state, &looping,
cc950f98 1760 ref_state, ref_looping, NULL, NULL);
f10ea640
JH
1761 if (pure_const_state == IPA_NEITHER)
1762 break;
1763 }
67348ccc 1764 w_info = (struct ipa_dfs_info *) w->aux;
ea900239
DB
1765 w = w_info->next_cycle;
1766 }
d56026c2
JH
1767 if (dump_file && (dump_flags & TDF_DETAILS))
1768 fprintf (dump_file, "Result %s looping %i\n",
1769 pure_const_names [pure_const_state],
1770 looping);
ea900239 1771
8413ca87
JJ
1772 /* Find the worst state of can_free for any node in the cycle. */
1773 bool can_free = false;
1774 w = node;
1775 while (w && !can_free)
1776 {
1777 struct cgraph_edge *e;
a756f161 1778 funct_state w_l = funct_state_summaries->get (w);
8413ca87
JJ
1779
1780 if (w_l->can_free
1781 || w->get_availability () == AVAIL_INTERPOSABLE
1782 || w->indirect_calls)
1783 can_free = true;
1784
1785 for (e = w->callees; e && !can_free; e = e->next_callee)
1786 {
1787 enum availability avail;
6cbde2e3 1788 struct cgraph_node *y = e->callee->
cc950f98
JH
1789 function_or_virtual_thunk_symbol (&avail,
1790 e->caller);
8413ca87
JJ
1791
1792 if (avail > AVAIL_INTERPOSABLE)
a756f161 1793 can_free = funct_state_summaries->get (y)->can_free;
8413ca87
JJ
1794 else
1795 can_free = true;
1796 }
1797 w_info = (struct ipa_dfs_info *) w->aux;
1798 w = w_info->next_cycle;
1799 }
1800
ea900239
DB
1801 /* Copy back the region's pure_const_state which is shared by
1802 all nodes in the region. */
1803 w = node;
1804 while (w)
1805 {
a756f161 1806 funct_state w_l = funct_state_summaries->get (w);
33977f81
JH
1807 enum pure_const_state_e this_state = pure_const_state;
1808 bool this_looping = looping;
ea900239 1809
8413ca87
JJ
1810 w_l->can_free = can_free;
1811 w->nonfreeing_fn = !can_free;
1812 if (!can_free && dump_file)
1813 fprintf (dump_file, "Function found not to call free: %s\n",
3629ff8a 1814 w->dump_name ());
8413ca87 1815
f87c9042
JH
1816 if (w_l->state_previously_known != IPA_NEITHER
1817 && this_state > w_l->state_previously_known)
da8c7675 1818 {
d8e3e8a5 1819 if (this_state == IPA_NEITHER)
a0e99d5b
JH
1820 this_looping = w_l->looping_previously_known;
1821 this_state = w_l->state_previously_known;
da8c7675 1822 }
03ec7d01
JH
1823 if (!this_looping && self_recursive_p (w))
1824 this_looping = true;
f87c9042
JH
1825 if (!w_l->looping_previously_known)
1826 this_looping = false;
812dbce5 1827
33977f81
JH
1828 /* All nodes within a cycle share the same info. */
1829 w_l->pure_const_state = this_state;
1830 w_l->looping = this_looping;
1831
d7636f56
JH
1832 /* Inline clones share declaration with their offline copies;
1833 do not modify their declarations since the offline copy may
1834 be different. */
a62bfab5 1835 if (!w->inlined_to)
d7636f56
JH
1836 switch (this_state)
1837 {
1838 case IPA_CONST:
6471396d 1839 remove_p |= ipa_make_function_const (w, this_looping, false);
d7636f56 1840 break;
b8698a0f 1841
d7636f56 1842 case IPA_PURE:
6471396d 1843 remove_p |= ipa_make_function_pure (w, this_looping, false);
d7636f56 1844 break;
b8698a0f 1845
d7636f56
JH
1846 default:
1847 break;
1848 }
67348ccc 1849 w_info = (struct ipa_dfs_info *) w->aux;
2505c5ed
JH
1850 w = w_info->next_cycle;
1851 }
1852 }
1853
af8bca3c 1854 ipa_free_postorder_info ();
15e80fc3 1855 free (order);
17e0fc92 1856 return remove_p;
15e80fc3
JH
1857}
1858
1859/* Produce transitive closure over the callgraph and compute nothrow
1860 attributes. */
1861
1862static void
1863propagate_nothrow (void)
1864{
1865 struct cgraph_node *node;
1866 struct cgraph_node *w;
1867 struct cgraph_node **order =
3dafb85c 1868 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
15e80fc3
JH
1869 int order_pos;
1870 int i;
1871 struct ipa_dfs_info * w_info;
1872
45272fd2 1873 order_pos = ipa_reduced_postorder (order, true,
9644e52a 1874 ignore_edge_for_nothrow);
2505c5ed
JH
1875 if (dump_file)
1876 {
d52f5295 1877 cgraph_node::dump_cgraph (dump_file);
af8bca3c 1878 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
2505c5ed 1879 }
15e80fc3 1880
073a8998 1881 /* Propagate the local information through the call graph to produce
2505c5ed
JH
1882 the global information. All the nodes within a cycle will have
1883 the same info so we collapse cycles first. Then we can do the
1884 propagation in one pass from the leaves to the roots. */
1885 for (i = 0; i < order_pos; i++ )
1886 {
1887 bool can_throw = false;
1888 node = order[i];
1889
67348ccc 1890 if (node->alias)
71fb4f92
JH
1891 continue;
1892
2505c5ed
JH
1893 /* Find the worst state for any node in the cycle. */
1894 w = node;
abb50207 1895 while (w && !can_throw)
2505c5ed 1896 {
f10ea640 1897 struct cgraph_edge *e, *ie;
2505c5ed 1898
9644e52a 1899 if (!TREE_NOTHROW (w->decl))
2505c5ed 1900 {
36330f82 1901 funct_state w_l = funct_state_summaries->get_create (w);
2505c5ed 1902
9644e52a
JH
1903 if (w_l->can_throw
1904 || w->get_availability () == AVAIL_INTERPOSABLE)
1905 can_throw = true;
1906
1907 for (e = w->callees; e && !can_throw; e = e->next_callee)
2505c5ed 1908 {
9644e52a
JH
1909 enum availability avail;
1910
1911 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1912 continue;
1913
1914 struct cgraph_node *y = e->callee->
a2b056a3
JH
1915 function_or_virtual_thunk_symbol (&avail,
1916 e->caller);
2505c5ed 1917
155ed511
SL
1918 /* We can use info about the callee only if we know it
1919 cannot be interposed.
a2b056a3
JH
1920 When callee is compiled with non-call exceptions we also
1921 must check that the declaration is bound to current
1922 body as other semantically equivalent body may still
1923 throw. */
9644e52a
JH
1924 if (avail <= AVAIL_INTERPOSABLE
1925 || (!TREE_NOTHROW (y->decl)
36330f82 1926 && (funct_state_summaries->get_create (y)->can_throw
a2b056a3
JH
1927 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1928 && !e->callee->binds_to_current_def_p (w)))))
2505c5ed
JH
1929 can_throw = true;
1930 }
9644e52a
JH
1931 for (ie = w->indirect_calls; ie && !can_throw;
1932 ie = ie->next_callee)
1933 if (ie->can_throw_external
1934 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1935 can_throw = true;
2505c5ed 1936 }
67348ccc 1937 w_info = (struct ipa_dfs_info *) w->aux;
2505c5ed
JH
1938 w = w_info->next_cycle;
1939 }
1940
1941 /* Copy back the region's pure_const_state which is shared by
1942 all nodes in the region. */
1943 w = node;
1944 while (w)
1945 {
61c74e84 1946 funct_state w_l = funct_state_summaries->get_create (w);
67348ccc 1947 if (!can_throw && !TREE_NOTHROW (w->decl))
33977f81 1948 {
d7636f56
JH
1949 /* Inline clones share declaration with their offline copies;
1950 do not modify their declarations since the offline copy may
1951 be different. */
a62bfab5 1952 if (!w->inlined_to)
d7636f56 1953 {
d52f5295 1954 w->set_nothrow_flag (true);
d7636f56
JH
1955 if (dump_file)
1956 fprintf (dump_file, "Function found to be nothrow: %s\n",
3629ff8a 1957 w->dump_name ());
d7636f56 1958 }
ea900239 1959 }
67348ccc 1960 else if (can_throw && !TREE_NOTHROW (w->decl))
b6fa5b01 1961 w_l->can_throw = true;
67348ccc 1962 w_info = (struct ipa_dfs_info *) w->aux;
ea900239
DB
1963 w = w_info->next_cycle;
1964 }
1965 }
1966
af8bca3c 1967 ipa_free_postorder_info ();
ea900239 1968 free (order);
15e80fc3
JH
1969}
1970
0fab169b
PK
1971/* Debugging function to dump state of malloc lattice. */
1972
1973DEBUG_FUNCTION
1974static void
1975dump_malloc_lattice (FILE *dump_file, const char *s)
1976{
1977 if (!dump_file)
1978 return;
1979
1980 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1981 cgraph_node *node;
1982 FOR_EACH_FUNCTION (node)
1983 {
67b3b8fe
ML
1984 funct_state fs = funct_state_summaries->get (node);
1985 if (fs)
3629ff8a 1986 fprintf (dump_file, "%s: %s\n", node->dump_name (),
67b3b8fe 1987 malloc_state_names[fs->malloc_state]);
0fab169b
PK
1988 }
1989}
1990
1991/* Propagate malloc attribute across the callgraph. */
1992
1993static void
1994propagate_malloc (void)
1995{
1996 cgraph_node *node;
1997 FOR_EACH_FUNCTION (node)
1998 {
1999 if (DECL_IS_MALLOC (node->decl))
36330f82 2000 if (!funct_state_summaries->exists (node))
0fab169b 2001 {
36330f82
ML
2002 funct_state fs = funct_state_summaries->get_create (node);
2003 fs->malloc_state = STATE_MALLOC;
0fab169b
PK
2004 }
2005 }
2006
2007 dump_malloc_lattice (dump_file, "Initial");
2008 struct cgraph_node **order
2009 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
2010 int order_pos = ipa_reverse_postorder (order);
2011 bool changed = true;
2012
2013 while (changed)
2014 {
2015 changed = false;
2016 /* Walk in postorder. */
2017 for (int i = order_pos - 1; i >= 0; --i)
2018 {
2019 cgraph_node *node = order[i];
2020 if (node->alias
2021 || !node->definition
36330f82 2022 || !funct_state_summaries->exists (node))
0fab169b
PK
2023 continue;
2024
67b3b8fe 2025 funct_state l = funct_state_summaries->get (node);
0fab169b
PK
2026
2027 /* FIXME: add support for indirect-calls. */
2028 if (node->indirect_calls)
2029 {
2030 l->malloc_state = STATE_MALLOC_BOTTOM;
2031 continue;
2032 }
2033
2034 if (node->get_availability () <= AVAIL_INTERPOSABLE)
2035 {
2036 l->malloc_state = STATE_MALLOC_BOTTOM;
2037 continue;
2038 }
2039
2040 if (l->malloc_state == STATE_MALLOC_BOTTOM)
2041 continue;
2042
9eb7669c 2043 auto_vec<cgraph_node *, 16> callees;
0fab169b
PK
2044 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2045 {
99353fcf 2046 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
0fab169b
PK
2047 if (es && es->is_return_callee_uncaptured)
2048 callees.safe_push (cs->callee);
2049 }
2050
2051 malloc_state_e new_state = l->malloc_state;
2052 for (unsigned j = 0; j < callees.length (); j++)
2053 {
2054 cgraph_node *callee = callees[j];
36330f82 2055 if (!funct_state_summaries->exists (node))
0fab169b
PK
2056 {
2057 new_state = STATE_MALLOC_BOTTOM;
2058 break;
2059 }
36330f82
ML
2060 malloc_state_e callee_state
2061 = funct_state_summaries->get_create (callee)->malloc_state;
0fab169b
PK
2062 if (new_state < callee_state)
2063 new_state = callee_state;
2064 }
2065 if (new_state != l->malloc_state)
2066 {
2067 changed = true;
2068 l->malloc_state = new_state;
2069 }
2070 }
2071 }
2072
2073 FOR_EACH_DEFINED_FUNCTION (node)
36330f82 2074 if (funct_state_summaries->exists (node))
0fab169b 2075 {
67b3b8fe 2076 funct_state l = funct_state_summaries->get (node);
0fab169b
PK
2077 if (!node->alias
2078 && l->malloc_state == STATE_MALLOC
f5850e7d
ML
2079 && !node->inlined_to
2080 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
0fab169b
PK
2081 {
2082 if (dump_file && (dump_flags & TDF_DETAILS))
2083 fprintf (dump_file, "Function %s found to be malloc\n",
3629ff8a 2084 node->dump_name ());
0fab169b
PK
2085
2086 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2087 node->set_malloc_flag (true);
2088 if (!malloc_decl_p && warn_suggest_attribute_malloc)
2089 warn_function_malloc (node->decl);
2090 }
2091 }
2092
2093 dump_malloc_lattice (dump_file, "after propagation");
2094 ipa_free_postorder_info ();
2095 free (order);
2096}
15e80fc3
JH
2097
2098/* Produce the global information by preforming a transitive closure
2099 on the local information that was produced by generate_summary. */
2100
3edf64aa
DM
2101unsigned int
2102pass_ipa_pure_const::
2103execute (function *)
15e80fc3 2104{
17e0fc92 2105 bool remove_p;
15e80fc3 2106
15e80fc3
JH
2107 /* Nothrow makes more function to not lead to return and improve
2108 later analysis. */
2109 propagate_nothrow ();
0fab169b 2110 propagate_malloc ();
17e0fc92 2111 remove_p = propagate_pure_const ();
15e80fc3 2112
36330f82 2113 delete funct_state_summaries;
17e0fc92 2114 return remove_p ? TODO_remove_functions : 0;
ea900239
DB
2115}
2116
2117static bool
2118gate_pure_const (void)
2119{
2bf86c84 2120 return flag_ipa_pure_const || in_lto_p;
ea900239
DB
2121}
2122
3edf64aa
DM
2123pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2124 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2125 pure_const_generate_summary, /* generate_summary */
2126 pure_const_write_summary, /* write_summary */
2127 pure_const_read_summary, /* read_summary */
2128 NULL, /* write_optimization_summary */
2129 NULL, /* read_optimization_summary */
2130 NULL, /* stmt_fixup */
2131 0, /* function_transform_todo_flags_start */
2132 NULL, /* function_transform */
2133 NULL), /* variable_transform */
36330f82 2134 init_p (false) {}
27a4cd48
DM
2135
2136ipa_opt_pass_d *
2137make_pass_ipa_pure_const (gcc::context *ctxt)
2138{
2139 return new pass_ipa_pure_const (ctxt);
2140}
2141
5dc16b19
MLI
2142/* Simple local pass for pure const discovery reusing the analysis from
2143 ipa_pure_const. This pass is effective when executed together with
2144 other optimization passes in early optimization pass queue. */
33977f81 2145
17795822
TS
2146namespace {
2147
2148const pass_data pass_data_local_pure_const =
be55bfe6
TS
2149{
2150 GIMPLE_PASS, /* type */
2151 "local-pure-const", /* name */
2152 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
2153 TV_IPA_PURE_CONST, /* tv_id */
2154 0, /* properties_required */
2155 0, /* properties_provided */
2156 0, /* properties_destroyed */
2157 0, /* todo_flags_start */
2158 0, /* todo_flags_finish */
2159};
2160
17795822 2161class pass_local_pure_const : public gimple_opt_pass
be55bfe6
TS
2162{
2163public:
2164 pass_local_pure_const (gcc::context *ctxt)
2165 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2166 {}
2167
2168 /* opt_pass methods: */
725793af
DM
2169 opt_pass * clone () final override
2170 {
2171 return new pass_local_pure_const (m_ctxt);
2172 }
2173 bool gate (function *) final override { return gate_pure_const (); }
2174 unsigned int execute (function *) final override;
be55bfe6
TS
2175
2176}; // class pass_local_pure_const
2177
2178unsigned int
2179pass_local_pure_const::execute (function *fun)
5dc16b19
MLI
2180{
2181 bool changed = false;
2182 funct_state l;
2183 bool skip;
2184 struct cgraph_node *node;
2185
d52f5295 2186 node = cgraph_node::get (current_function_decl);
5dc16b19 2187 skip = skip_function_for_local_pure_const (node);
12b9f3ac 2188
5dc16b19
MLI
2189 if (!warn_suggest_attribute_const
2190 && !warn_suggest_attribute_pure
2191 && skip)
2192 return 0;
73add7fe 2193
269c80f2
JJ
2194 l = analyze_function (node, false);
2195
2196 /* Do NORETURN discovery. */
73add7fe 2197 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
be55bfe6 2198 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
73add7fe 2199 {
be55bfe6 2200 warn_function_noreturn (fun->decl);
73add7fe 2201 if (dump_file)
be55bfe6
TS
2202 fprintf (dump_file, "Function found to be noreturn: %s\n",
2203 current_function_name ());
73add7fe
JH
2204
2205 /* Update declaration and reduce profile to executed once. */
61396dfb
JH
2206 if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2207 changed = true;
73add7fe 2208 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
be55bfe6 2209 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
73add7fe 2210 }
c59f5d1b 2211
33977f81
JH
2212 switch (l->pure_const_state)
2213 {
2214 case IPA_CONST:
494bdadf
JH
2215 changed |= ipa_make_function_const
2216 (cgraph_node::get (current_function_decl), l->looping, true);
33977f81
JH
2217 break;
2218
2219 case IPA_PURE:
494bdadf
JH
2220 changed |= ipa_make_function_pure
2221 (cgraph_node::get (current_function_decl), l->looping, true);
33977f81
JH
2222 break;
2223
2224 default:
2225 break;
2226 }
2227 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2228 {
d52f5295 2229 node->set_nothrow_flag (true);
33977f81
JH
2230 changed = true;
2231 if (dump_file)
2232 fprintf (dump_file, "Function found to be nothrow: %s\n",
df92c640 2233 current_function_name ());
33977f81 2234 }
0fab169b
PK
2235
2236 if (l->malloc_state == STATE_MALLOC
2237 && !DECL_IS_MALLOC (current_function_decl))
2238 {
2239 node->set_malloc_flag (true);
2240 if (warn_suggest_attribute_malloc)
2241 warn_function_malloc (node->decl);
2242 changed = true;
2243 if (dump_file)
2244 fprintf (dump_file, "Function found to be malloc: %s\n",
3629ff8a 2245 node->dump_name ());
0fab169b
PK
2246 }
2247
04695783 2248 free (l);
33977f81
JH
2249 if (changed)
2250 return execute_fixup_cfg ();
2251 else
2252 return 0;
2253}
2254
17795822
TS
2255} // anon namespace
2256
27a4cd48
DM
2257gimple_opt_pass *
2258make_pass_local_pure_const (gcc::context *ctxt)
2259{
2260 return new pass_local_pure_const (ctxt);
2261}
c1bf2a39
AM
2262
2263/* Emit noreturn warnings. */
2264
17795822
TS
2265namespace {
2266
2267const pass_data pass_data_warn_function_noreturn =
c1bf2a39
AM
2268{
2269 GIMPLE_PASS, /* type */
2270 "*warn_function_noreturn", /* name */
2271 OPTGROUP_NONE, /* optinfo_flags */
c1bf2a39
AM
2272 TV_NONE, /* tv_id */
2273 PROP_cfg, /* properties_required */
2274 0, /* properties_provided */
2275 0, /* properties_destroyed */
2276 0, /* todo_flags_start */
2277 0, /* todo_flags_finish */
2278};
2279
17795822 2280class pass_warn_function_noreturn : public gimple_opt_pass
c1bf2a39
AM
2281{
2282public:
2283 pass_warn_function_noreturn (gcc::context *ctxt)
2284 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2285 {}
2286
2287 /* opt_pass methods: */
725793af
DM
2288 bool gate (function *) final override
2289 {
2290 return warn_suggest_attribute_noreturn;
2291 }
2292 unsigned int execute (function *fun) final override
be55bfe6
TS
2293 {
2294 if (!TREE_THIS_VOLATILE (current_function_decl)
2295 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2296 warn_function_noreturn (current_function_decl);
2297 return 0;
2298 }
c1bf2a39
AM
2299
2300}; // class pass_warn_function_noreturn
2301
17795822
TS
2302} // anon namespace
2303
c1bf2a39
AM
2304gimple_opt_pass *
2305make_pass_warn_function_noreturn (gcc::context *ctxt)
2306{
2307 return new pass_warn_function_noreturn (ctxt);
2308}
38147a2a
JH
2309
2310/* Simple local pass for pure const discovery reusing the analysis from
2311 ipa_pure_const. This pass is effective when executed together with
2312 other optimization passes in early optimization pass queue. */
2313
17795822
TS
2314namespace {
2315
2316const pass_data pass_data_nothrow =
38147a2a
JH
2317{
2318 GIMPLE_PASS, /* type */
2319 "nothrow", /* name */
2320 OPTGROUP_NONE, /* optinfo_flags */
2321 TV_IPA_PURE_CONST, /* tv_id */
2322 0, /* properties_required */
2323 0, /* properties_provided */
2324 0, /* properties_destroyed */
2325 0, /* todo_flags_start */
2326 0, /* todo_flags_finish */
2327};
2328
17795822 2329class pass_nothrow : public gimple_opt_pass
38147a2a
JH
2330{
2331public:
2332 pass_nothrow (gcc::context *ctxt)
2333 : gimple_opt_pass (pass_data_nothrow, ctxt)
2334 {}
2335
2336 /* opt_pass methods: */
725793af
DM
2337 opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
2338 bool gate (function *) final override { return optimize; }
2339 unsigned int execute (function *) final override;
38147a2a
JH
2340
2341}; // class pass_nothrow
2342
2343unsigned int
2344pass_nothrow::execute (function *)
2345{
2346 struct cgraph_node *node;
2347 basic_block this_block;
2348
2349 if (TREE_NOTHROW (current_function_decl))
2350 return 0;
2351
2352 node = cgraph_node::get (current_function_decl);
2353
67914693 2354 /* We run during lowering, we cannot really use availability yet. */
38147a2a
JH
2355 if (cgraph_node::get (current_function_decl)->get_availability ()
2356 <= AVAIL_INTERPOSABLE)
2357 {
2358 if (dump_file)
2359 fprintf (dump_file, "Function is interposable;"
2360 " not analyzing.\n");
2361 return true;
2362 }
2363
2364 FOR_EACH_BB_FN (this_block, cfun)
2365 {
2366 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2367 !gsi_end_p (gsi);
2368 gsi_next (&gsi))
36bbc05d 2369 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
38147a2a
JH
2370 {
2371 if (is_gimple_call (gsi_stmt (gsi)))
2372 {
2373 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2374 if (callee_t && recursive_call_p (current_function_decl,
2375 callee_t))
2376 continue;
2377 }
2378
2379 if (dump_file)
2380 {
2381 fprintf (dump_file, "Statement can throw: ");
ef6cb4c7 2382 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
38147a2a
JH
2383 }
2384 return 0;
2385 }
2386 }
2387
2388 node->set_nothrow_flag (true);
d93c452f
JJ
2389
2390 bool cfg_changed = false;
2391 if (self_recursive_p (node))
2392 FOR_EACH_BB_FN (this_block, cfun)
db29daa5
RB
2393 if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
2394 {
2395 tree callee_t = gimple_call_fndecl (g);
2396 if (callee_t
2397 && recursive_call_p (current_function_decl, callee_t)
2398 && maybe_clean_eh_stmt (g)
2399 && gimple_purge_dead_eh_edges (this_block))
2400 cfg_changed = true;
2401 }
d93c452f 2402
38147a2a
JH
2403 if (dump_file)
2404 fprintf (dump_file, "Function found to be nothrow: %s\n",
2405 current_function_name ());
d93c452f 2406 return cfg_changed ? TODO_cleanup_cfg : 0;
38147a2a
JH
2407}
2408
17795822
TS
2409} // anon namespace
2410
38147a2a
JH
2411gimple_opt_pass *
2412make_pass_nothrow (gcc::context *ctxt)
2413{
2414 return new pass_nothrow (ctxt);
2415}